
/solr/core/src/test/org/apache/solr/index/hdfs/CheckHdfsIndexTest.java

http://github.com/apache/lucene-solr
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.solr.index.hdfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.lucene.index.BaseTestCheckIndex;
import org.apache.lucene.store.Directory;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.apache.solr.cloud.hdfs.HdfsTestUtil;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.store.hdfs.HdfsDirectory;
import org.apache.solr.util.BadHdfsThreadsFilter;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;

import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;

@ThreadLeakFilters(defaultFilters = true, filters = {
    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
})
// commented out on: 24-Dec-2018   @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 12-Jun-2018
public class CheckHdfsIndexTest extends AbstractFullDistribZkTestBase {
  private static MiniDFSCluster dfsCluster;
  private static Path path;

  private BaseTestCheckIndex testCheckIndex;
  private Directory directory;

  public CheckHdfsIndexTest() {
    super();
    sliceCount = 1;
    fixShardCount(1);
    testCheckIndex = new BaseTestCheckIndex();
  }

  @BeforeClass
  public static void setupClass() throws Exception {
    dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath());
    path = new Path(HdfsTestUtil.getURI(dfsCluster) + "/solr/");
  }

  @AfterClass
  public static void teardownClass() throws Exception {
    try {
      HdfsTestUtil.teardownClass(dfsCluster);
    } finally {
      dfsCluster = null;
      path = null;
    }
  }

  @Override
  @Before
  public void setUp() throws Exception {
    super.setUp();

    Configuration conf = HdfsTestUtil.getClientConfiguration(dfsCluster);
    directory = new HdfsDirectory(path, conf);
  }

  @Override
  @After
  public void tearDown() throws Exception {
    try {
      if (null != directory) {
        directory.close();
      }
    } finally {
      try (FileSystem fs = FileSystem.get(HdfsTestUtil.getClientConfiguration(dfsCluster))) {
        fs.delete(path, true);
      } finally {
        super.tearDown();
      }
    }
  }

  @Override
  protected String getDataDir(String dataDir) throws IOException {
    return HdfsTestUtil.getDataDir(dfsCluster, dataDir);
  }

  @Test
  public void doTest() throws Exception {
    waitForRecoveriesToFinish(false);

    indexr(id, 1);
    commit();
    waitForRecoveriesToFinish(false);

    String[] args;
    {
      SolrClient client = clients.get(0);
      NamedList<Object> response = client.query(new SolrQuery().setRequestHandler("/admin/system")).getResponse();
      NamedList<Object> coreInfo = (NamedList<Object>) response.get("core");
      String indexDir = ((NamedList<Object>) coreInfo.get("directory")).get("data") + "/index";

      args = new String[] {indexDir};
    }

    assertEquals("CheckHdfsIndex return status", 0, CheckHdfsIndex.doMain(args));
  }

  @Test
  public void testDeletedDocs() throws IOException {
    testCheckIndex.testDeletedDocs(directory);
  }

  @Test
  public void testChecksumsOnly() throws IOException {
    testCheckIndex.testChecksumsOnly(directory);
  }

  @Test
  public void testChecksumsOnlyVerbose() throws IOException {
    testCheckIndex.testChecksumsOnlyVerbose(directory);
  }

  @Test
  @Ignore("We explicitly use a NoLockFactory, so this test doesn't make sense.")
  public void testObtainsLock() throws IOException {
    testCheckIndex.testObtainsLock(directory);
  }
}
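
For orientation, below is a minimal standalone sketch (not part of the repository) of how CheckHdfsIndex might be driven directly, outside the SolrCloud setup used in doTest() above. The HDFS index URI is a placeholder assumption; the example class is placed in the same package as the test so the CheckHdfsIndex.doMain call resolves the same way it does in doTest(), where a return value of 0 is asserted to mean a healthy index.

package org.apache.solr.index.hdfs;

/**
 * Hypothetical example, not part of lucene-solr: runs CheckHdfsIndex against a
 * hard-coded HDFS index path. doTest() above derives the real path from the
 * /admin/system response instead of hard-coding it.
 */
public class CheckHdfsIndexExample {
  public static void main(String[] args) throws Exception {
    // Placeholder URI; adjust to the actual NameNode address and core data directory.
    String indexDir = "hdfs://localhost:8020/solr/collection1/data/index";

    // doMain returns 0 when the index checks out cleanly and non-zero otherwise,
    // which is exactly what doTest() asserts on.
    int status = CheckHdfsIndex.doMain(new String[] {indexDir});
    System.out.println("CheckHdfsIndex exit status: " + status);
    System.exit(status);
  }
}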