dbunit/src/main/java/org/jboss/arquillian/persistence/dbunit/dataset/yaml/YamlDataSetProducer.java

Source: https://github.com/arquillian/arquillian-extension-persistence (Java, 163 lines)

/*
 * JBoss, Home of Professional Open Source
 * Copyright 2011 Red Hat Inc. and/or its affiliates and other contributors
 * as indicated by the @authors tag. All rights reserved.
 * See the copyright.txt in the distribution for a
 * full listing of individual contributors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.arquillian.persistence.dbunit.dataset.yaml;

import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;

import org.dbunit.dataset.Column;
import org.dbunit.dataset.DataSetException;
import org.dbunit.dataset.DefaultTableMetaData;
import org.dbunit.dataset.ITableMetaData;
import org.dbunit.dataset.datatype.DataType;
import org.dbunit.dataset.stream.DefaultConsumer;
import org.dbunit.dataset.stream.IDataSetConsumer;
import org.dbunit.dataset.stream.IDataSetProducer;
import org.jboss.arquillian.persistence.dbunit.dataset.Row;
import org.jboss.arquillian.persistence.dbunit.dataset.Table;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.constructor.Constructor;
import org.yaml.snakeyaml.nodes.Tag;
import org.yaml.snakeyaml.representer.Representer;
import org.yaml.snakeyaml.resolver.Resolver;
  41. /**
  42. * Produces YAML data set from the given file.
  43. *
  44. * @author <a href="mailto:bartosz.majsak@gmail.com">Bartosz Majsak</a>
  45. * @see YamlDataSet
  46. */
  47. public class YamlDataSetProducer implements IDataSetProducer {
  48. private final InputStream input;
  49. private boolean caseSensitiveTableNames;
  50. private IDataSetConsumer consumer = new DefaultConsumer();
  51. public YamlDataSetProducer(InputStream inputStream) {
  52. input = inputStream;
  53. }
  54. @Override
  55. public void setConsumer(IDataSetConsumer consumer) {
  56. this.consumer = consumer;
  57. }
  58. @Override
  59. public void produce() throws DataSetException {
  60. consumer.startDataSet();
  61. @SuppressWarnings("unchecked") final List<Table> tables =
  62. createTables((Map<String, List<Map<String, String>>>) createYamlReader().load(input));
  63. for (Table table : tables) {
  64. ITableMetaData tableMetaData = createTableMetaData(table);
  65. consumer.startTable(tableMetaData);
  66. for (Row row : table.getRows()) {
  67. List<String> values = new ArrayList<String>();
  68. for (Column column : tableMetaData.getColumns()) {
  69. values.add(row.valueOf(column.getColumnName()));
  70. }
  71. consumer.row(values.toArray());
  72. }
  73. consumer.endTable();
  74. }
  75. consumer.endDataSet();
  76. }
  77. public Yaml createYamlReader() {
  78. final Yaml yaml = new Yaml(new Constructor(), new Representer(), new DumperOptions(),
  79. new Resolver() {
  80. @Override
  81. protected void addImplicitResolvers() {
  82. // Intentionally left TIMESTAMP as string to let DBUnit deal with the conversion
  83. addImplicitResolver(Tag.BOOL, BOOL, "yYnNtTfFoO");
  84. addImplicitResolver(Tag.INT, INT, "-+0123456789");
  85. addImplicitResolver(Tag.FLOAT, FLOAT, "-+0123456789.");
  86. addImplicitResolver(Tag.MERGE, MERGE, "<");
  87. addImplicitResolver(Tag.NULL, NULL, "~nN\0");
  88. addImplicitResolver(Tag.NULL, EMPTY, null);
  89. addImplicitResolver(Tag.YAML, YAML, "!&*");
  90. }
  91. });
  92. return yaml;
  93. }
  94. private ITableMetaData createTableMetaData(Table table) {
  95. return new DefaultTableMetaData(table.getTableName(), createColumns(table.getColumns()));
  96. }
  97. private Column[] createColumns(Collection<String> columnNames) {
  98. final List<Column> columns = new ArrayList<Column>();
  99. for (String columnName : columnNames) {
  100. Column column = new Column(columnName, DataType.UNKNOWN);
  101. columns.add(column);
  102. }
  103. return columns.toArray(new Column[columns.size()]);
  104. }
  105. private List<Table> createTables(Map<String, List<Map<String, String>>> yamlStructure) {
  106. final List<Table> tables = new ArrayList<Table>();
  107. for (Map.Entry<String, List<Map<String, String>>> entry : yamlStructure.entrySet()) {
  108. Table table = new Table(entry.getKey());
  109. table.addColumns(extractColumns(entry.getValue()));
  110. table.addRows(extractRows(entry.getValue()));
  111. tables.add(table);
  112. }
  113. return tables;
  114. }
  115. private Collection<Row> extractRows(List<Map<String, String>> rows) {
  116. final Collection<Row> extractedRows = new ArrayList<Row>();
  117. if (rows == null || rows.isEmpty()) {
  118. return extractedRows;
  119. }
  120. for (Map<String, String> row : rows) {
  121. extractedRows.add(new Row(row));
  122. }
  123. return extractedRows;
  124. }
  125. private Collection<String> extractColumns(List<Map<String, String>> rows) {
  126. final Collection<String> columns = new HashSet<String>();
  127. if (rows == null || rows.isEmpty()) {
  128. return columns;
  129. }
  130. for (Map<String, String> row : rows) {
  131. columns.addAll(row.keySet());
  132. }
  133. return columns;
  134. }
  135. // Getters & Setters
  136. public boolean isCaseSensitiveTableNames() {
  137. return caseSensitiveTableNames;
  138. }
  139. public void setCaseSensitiveTableNames(boolean caseSensitiveTableNames) {
  140. this.caseSensitiveTableNames = caseSensitiveTableNames;
  141. }
  142. }