
/test/distributed/org/apache/cassandra/distributed/test/FrozenUDTTest.java

https://github.com/beobal/cassandra
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.distributed.test;

import java.io.IOException;
import java.util.concurrent.ExecutionException;

import org.junit.Test;

import org.apache.cassandra.distributed.Cluster;
import org.apache.cassandra.distributed.api.ConsistencyLevel;
import org.apache.cassandra.service.StorageService;

import static org.apache.cassandra.distributed.shared.AssertUtils.assertRows;
import static org.apache.cassandra.distributed.shared.AssertUtils.row;
public class FrozenUDTTest extends TestBaseImpl
{
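    /**
     * Rows keyed by a frozen UDT clustering column should remain addressable after the
     * type gains a new field: insert and read back a range of rows, ALTER the type, then
     * insert an overlapping range and verify the lookups again after a flush.
     */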
    @Test
    public void testAddAndUDTField() throws IOException
    {
        try (Cluster cluster = init(Cluster.build(1).start()))
        {
            cluster.schemaChange("create type " + KEYSPACE + ".a (foo text)");
            cluster.schemaChange("create table " + KEYSPACE + ".x (id int, ck frozen<a>, i int, primary key (id, ck))");

            for (int i = 0; i < 10; i++)
                cluster.coordinator(1).execute("insert into " + KEYSPACE + ".x (id, ck, i) VALUES (?, " + json(i) + ", ? )", ConsistencyLevel.ALL, i, i);

            for (int i = 0; i < 10; i++)
                assertRows(cluster.coordinator(1).execute("select i from " + KEYSPACE + ".x WHERE id = ? and ck = " + json(i), ConsistencyLevel.ALL, i),
                           row(i));

            cluster.schemaChange("alter type " + KEYSPACE + ".a add bar text");
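            // Re-insert an overlapping range with the post-ALTER type; lookups by the same
            // single-field json literal should still find the rows after the flush.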
            for (int i = 5; i < 15; i++)
                cluster.coordinator(1).execute("insert into " + KEYSPACE + ".x (id, ck, i) VALUES (?, " + json(i) + ", ? )", ConsistencyLevel.ALL, i, i);

            cluster.forEach(i -> i.flush(KEYSPACE));

            for (int i = 5; i < 15; i++)
                assertRows(cluster.coordinator(1).execute("select i from " + KEYSPACE + ".x WHERE id = ? and ck = " + json(i), ConsistencyLevel.ALL, i),
                           row(i));
        }
    }
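    /**
     * Empty strings inside a frozen UDT clustering value: lookups by the exact json
     * literal should keep working after the type gains a field, and a value with the
     * new field explicitly set to "" is a distinct clustering key.
     */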
    @Test
    public void testEmptyValue() throws IOException
    {
        try (Cluster cluster = init(Cluster.build(1).start()))
        {
            cluster.schemaChange("create type " + KEYSPACE + ".a (foo text)");
            cluster.schemaChange("create table " + KEYSPACE + ".x (id int, ck frozen<a>, i int, primary key (id, ck))");
            cluster.coordinator(1).execute("insert into " + KEYSPACE + ".x (id, ck, i) VALUES (1, system.fromjson('{\"foo\":\"\"}'), 1)", ConsistencyLevel.ALL);
            cluster.coordinator(1).execute("insert into " + KEYSPACE + ".x (id, ck, i) VALUES (1, system.fromjson('{\"foo\":\"a\"}'), 2)", ConsistencyLevel.ALL);
            cluster.forEach(i -> i.flush(KEYSPACE));

            Runnable check = () -> {
                assertRows(cluster.coordinator(1).execute("select i from " + KEYSPACE + ".x WHERE id = 1 and ck = system.fromjson('{\"foo\":\"\"}')", ConsistencyLevel.ALL),
                           row(1));
                assertRows(cluster.coordinator(1).execute("select i from " + KEYSPACE + ".x WHERE id = 1 and ck = system.fromjson('{\"foo\":\"a\"}')", ConsistencyLevel.ALL),
                           row(2));
            };

            check.run();
            cluster.schemaChange("alter type " + KEYSPACE + ".a add bar text");
            check.run();
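            // A ck with bar explicitly set to "" is not the same clustering value as the
            // pre-ALTER {"foo":""} row, so this lookup matches nothing until such a row
            // is inserted below.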
            assertRows(cluster.coordinator(1).execute("select i from " + KEYSPACE + ".x WHERE id = 1 and ck = system.fromjson('{\"foo\":\"\",\"bar\":\"\"}')", ConsistencyLevel.ALL));

            cluster.coordinator(1).execute("insert into " + KEYSPACE + ".x (id, ck, i) VALUES (1, system.fromjson('{\"foo\":\"\",\"bar\":\"\"}'), 3)", ConsistencyLevel.ALL);
            check.run();
            assertRows(cluster.coordinator(1).execute("select i from " + KEYSPACE + ".x WHERE id = 1 and ck = system.fromjson('{\"foo\":\"\",\"bar\":\"\"}')", ConsistencyLevel.ALL),
                       row(3));
        }
    }
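    /**
     * Add a field to the UDT while data for the old type layout is already flushed,
     * then rewrite the sstables and compact, and verify lookups by the frozen UDT
     * clustering key still return the expected rows.
     */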
    @Test
    public void testUpgradeSStables() throws IOException
    {
        try (Cluster cluster = init(Cluster.build(1).start()))
        {
            cluster.schemaChange("create type " + KEYSPACE + ".a (foo text)");
            cluster.schemaChange("create table " + KEYSPACE + ".x (id int, ck frozen<a>, i int, primary key (id, ck))");
            cluster.coordinator(1).execute("insert into " + KEYSPACE + ".x (id, ck, i) VALUES (?, " + json(1) + ", ? )", ConsistencyLevel.ALL, 1, 1);
            assertRows(cluster.coordinator(1).execute("select i from " + KEYSPACE + ".x WHERE id = ? and ck = " + json(1), ConsistencyLevel.ALL, 1), row(1));
            cluster.forEach(i -> i.flush(KEYSPACE));
            assertRows(cluster.coordinator(1).execute("select i from " + KEYSPACE + ".x WHERE id = ? and ck = " + json(1), ConsistencyLevel.ALL, 1), row(1));

            cluster.schemaChange("alter type " + KEYSPACE + ".a add bar text");
            cluster.coordinator(1).execute("insert into " + KEYSPACE + ".x (id, ck, i) VALUES (?, " + json(2) + ", ? )", ConsistencyLevel.ALL, 2, 2);
            cluster.forEach(i -> i.flush(KEYSPACE));
            assertRows(cluster.coordinator(1).execute("select i from " + KEYSPACE + ".x WHERE id = ? and ck = " + json(2), ConsistencyLevel.ALL, 2), row(2));
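            // Rewrite the table's sstables on every node via upgradesstables, then force a
            // compaction, and confirm both rows are still readable by their clustering key.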
            cluster.forEach(i -> i.runOnInstance(() -> {
                try
                {
                    StorageService.instance.upgradeSSTables(KEYSPACE, false, "x");
                }
                catch (IOException | ExecutionException | InterruptedException e)
                {
                    throw new RuntimeException(e);
                }
            }));

            cluster.forEach(i -> i.forceCompact(KEYSPACE, "x"));

            for (int i = 1; i < 3; i++)
                assertRows(cluster.coordinator(1).execute("select i from " + KEYSPACE + ".x WHERE id = ? and ck = " + json(i), ConsistencyLevel.ALL, i),
                           row(i));
        }
    }
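    /**
     * Two-node cluster with deliberately divergent schemas: only node 1 knows about the
     * added UDT field, yet writes coordinated by node 1 at CL.ALL must be accepted and
     * flushable on node 2.
     */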
    @Test
    public void testDivergentSchemas() throws Throwable
    {
        try (Cluster cluster = init(Cluster.create(2)))
        {
            cluster.schemaChange("create type " + KEYSPACE + ".a (foo text)");
            cluster.schemaChange("create table " + KEYSPACE + ".x (id int, ck frozen<a>, i int, primary key (id, ck))");
            cluster.get(1).executeInternal("alter type " + KEYSPACE + ".a add bar text");
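            // The ALTER above was applied via executeInternal on node 1 only, so the inserts
            // below carry a UDT value with a field node 2 has not yet learned about.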
            cluster.coordinator(1).execute("insert into " + KEYSPACE + ".x (id, ck, i) VALUES (?, " + json(1, 1) + ", ? )", ConsistencyLevel.ALL,
                                           1, 1);
            cluster.coordinator(1).execute("insert into " + KEYSPACE + ".x (id, ck, i) VALUES (?, " + json(1, 2) + ", ? )", ConsistencyLevel.ALL,
                                           2, 2);
            cluster.get(2).flush(KEYSPACE);
        }
    }
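    // Helpers building a fromjson() literal for the frozen UDT; the two-argument version
    // also sets the 'bar' field added by the ALTER TYPE statements above.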
    private String json(int i)
    {
        return String.format("system.fromjson('{\"foo\":\"%d\"}')", i);
    }

    private String json(int i, int j)
    {
        return String.format("system.fromjson('{\"foo\":\"%d\", \"bar\":\"%d\"}')", i, j);
    }
}