
/vendor/plugins/ferret/ruby/test/unit/index/tc_index_reader.rb

https://bitbucket.org/mmarini/backpagefirst

require File.dirname(__FILE__) + "/../../test_helper"
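
# IndexReaderCommon holds the shared IndexReader assertions. It is mixed into
# each of the Test::Unit::TestCase classes below so the same checks run against
# several reader configurations (single directory, external readers, directory
# arrays and filesystem paths).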
module IndexReaderCommon
  include Ferret::Index
  include Ferret::Analysis

  def test_index_reader
    do_test_get_field_names()
    do_test_term_enum()
    do_test_term_doc_enum()
    do_test_term_vectors()
    do_test_get_doc()
  end

  def do_test_get_field_names()
    field_names = @ir.field_names
    assert(field_names.include?(:body))
    assert(field_names.include?(:changing_field))
    assert(field_names.include?(:author))
    assert(field_names.include?(:title))
    assert(field_names.include?(:text))
    assert(field_names.include?(:year))
  end

  def do_test_term_enum()
    te = @ir.terms(:author)

    assert_equal('[{"term":"Leo","frequency":1},{"term":"Tolstoy","frequency":1}]', te.to_json)
    te.field = :author
    assert_equal('[["Leo",1],["Tolstoy",1]]', te.to_json(:fast))
    te.field = :author

    assert(te.next?)
    assert_equal("Leo", te.term)
    assert_equal(1, te.doc_freq)
    assert(te.next?)
    assert_equal("Tolstoy", te.term)
    assert_equal(1, te.doc_freq)
    assert(! te.next?)

    te.field = :body
    assert(te.next?)
    assert_equal("And", te.term)
    assert_equal(1, te.doc_freq)

    assert(te.skip_to("Not"))
    assert_equal("Not", te.term)
    assert_equal(1, te.doc_freq)
    assert(te.next?)
    assert_equal("Random", te.term)
    assert_equal(16, te.doc_freq)

    te.field = :text
    assert(te.skip_to("which"))
  46. assert("which", te.term)
    assert_equal(1, te.doc_freq)
    assert(! te.next?)

    te.field = :title
    assert(te.next?)
    assert_equal("War And Peace", te.term)
    assert_equal(1, te.doc_freq)
    assert(!te.next?)

    expected = %w{is 1 more 1 not 1 skip 42 stored 1 text 1 which 1}
    te = @ir.terms(:text)
    te.each do |term, doc_freq|
      assert_equal(expected.shift, term)
      assert_equal(expected.shift.to_i, doc_freq)
    end

    te = @ir.terms_from(:body, "Not")
    assert_equal("Not", te.term)
    assert_equal(1, te.doc_freq)
    assert(te.next?)
    assert_equal("Random", te.term)
    assert_equal(16, te.doc_freq)
  end

  def do_test_term_doc_enum()
    assert_equal(IndexTestHelper::INDEX_TEST_DOCS.size, @ir.num_docs())
    assert_equal(IndexTestHelper::INDEX_TEST_DOCS.size, @ir.max_doc())

    assert_equal(4, @ir.doc_freq(:body, "Wally"))
    tde = @ir.term_docs_for(:body, "Wally")

    [
      [ 0, 1],
      [ 5, 1],
      [18, 3],
      [20, 6]
    ].each do |doc, freq|
      assert(tde.next?)
      assert_equal(doc, tde.doc())
      assert_equal(freq, tde.freq())
    end
    assert(! tde.next?)

    tde = @ir.term_docs_for(:body, "Wally")
    assert_equal('[{"document":0,"frequency":1},{"document":5,"frequency":1},{"document":18,"frequency":3},{"document":20,"frequency":6}]', tde.to_json)
    tde = @ir.term_docs_for(:body, "Wally")
    assert_equal('[[0,1],[5,1],[18,3],[20,6]]', tde.to_json(:fast))

    do_test_term_docpos_enum_skip_to(tde)

    # test term positions
    tde = @ir.term_positions_for(:body, "read")
    [
      [false,  1, 1, [3]],
      [false,  2, 2, [1, 4]],
      [false,  6, 4, [3, 4]],
      [false,  9, 3, [0, 4]],
      [ true, 16, 2, [2]],
      [ true, 21, 6, [3, 4, 5, 8, 9, 10]]
    ].each do |skip, doc, freq, positions|
      if skip
        assert(tde.skip_to(doc))
      else
        assert(tde.next?)
      end
      assert_equal(doc, tde.doc())
      assert_equal(freq, tde.freq())
      positions.each {|pos| assert_equal(pos, tde.next_position())}
    end

    assert_nil(tde.next_position())
    assert(! tde.next?)

    tde = @ir.term_positions_for(:body, "read")
    assert_equal('[' +
        '{"document":1,"frequency":1,"positions":[3]},' +
        '{"document":2,"frequency":2,"positions":[1,4]},' +
        '{"document":6,"frequency":4,"positions":[3,4,5,6]},' +
        '{"document":9,"frequency":3,"positions":[0,4,13]},' +
        '{"document":10,"frequency":1,"positions":[1]},' +
        '{"document":16,"frequency":2,"positions":[2,3]},' +
        '{"document":17,"frequency":1,"positions":[2]},' +
        '{"document":20,"frequency":1,"positions":[21]},' +
        '{"document":21,"frequency":6,"positions":[3,4,5,8,9,10]}]',
      tde.to_json())

    tde = @ir.term_positions_for(:body, "read")
    assert_equal('[' +
        '[1,1,[3]],' +
        '[2,2,[1,4]],' +
        '[6,4,[3,4,5,6]],' +
        '[9,3,[0,4,13]],' +
        '[10,1,[1]],' +
        '[16,2,[2,3]],' +
        '[17,1,[2]],' +
        '[20,1,[21]],' +
        '[21,6,[3,4,5,8,9,10]]]',
      tde.to_json(:fast))

    tde = @ir.term_positions_for(:body, "read")
    do_test_term_docpos_enum_skip_to(tde)
  end
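
  # The docs matching "skip" in the :text field are set up so that the term
  # frequency equals the doc number, which is why the helper below can assert
  # doc() and freq() against the same expected value.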
  def do_test_term_docpos_enum_skip_to(tde)
    tde.seek(:text, "skip")

    [
      [10, 22],
      [44, 44],
      [60, 60],
      [62, 62],
      [63, 63],
    ].each do |skip_doc, doc_and_freq|
      assert(tde.skip_to(skip_doc))
      assert_equal(doc_and_freq, tde.doc())
      assert_equal(doc_and_freq, tde.freq())
    end

    assert(! tde.skip_to(IndexTestHelper::INDEX_TEST_DOC_COUNT))
    assert(! tde.skip_to(IndexTestHelper::INDEX_TEST_DOC_COUNT))
    assert(! tde.skip_to(IndexTestHelper::INDEX_TEST_DOC_COUNT + 100))

    tde.seek(:text, "skip")
    assert(! tde.skip_to(IndexTestHelper::INDEX_TEST_DOC_COUNT))
  end

  def do_test_term_vectors()
    expected_tv = TermVector.new(:body,
      [
        TVTerm.new("word1", 3, [2, 4, 7]),
        TVTerm.new("word2", 1, [3]),
        TVTerm.new("word3", 4, [0, 5, 8, 9]),
        TVTerm.new("word4", 2, [1, 6])
      ],
      [*(0...10)].collect {|i| TVOffsets.new(i*6, (i+1)*6 - 1)})

    tv = @ir.term_vector(3, :body)
    assert_equal(expected_tv, tv)

    tvs = @ir.term_vectors(3)
    assert_equal(3, tvs.size)
    assert_equal(expected_tv, tvs[:body])

    tv = tvs[:author]
    assert_equal(:author, tv.field)
    assert_equal([TVTerm.new("Leo", 1, [0]), TVTerm.new("Tolstoy", 1, [1])], tv.terms)
    assert(tv.offsets.nil?)

    tv = tvs[:title]
    assert_equal(:title, tv.field)
    assert_equal([TVTerm.new("War And Peace", 1, nil)], tv.terms)
    assert_equal([TVOffsets.new(0, 13)], tv.offsets)
  end

  def do_test_get_doc()
    doc = @ir.get_document(3)
    [:author, :body, :title, :year].each {|fn| assert(doc.fields.include?(fn))}
    assert_equal(4, doc.fields.size)
    assert_equal(0, doc.size)
    assert_equal([], doc.keys)

    assert_equal("Leo Tolstoy", doc[:author])
    assert_equal("word3 word4 word1 word2 word1 word3 word4 word1 word3 word3",
                 doc[:body])
    assert_equal("War And Peace", doc[:title])
    assert_equal("1865", doc[:year])
    assert_nil(doc[:text])

    assert_equal(4, doc.size)
    [:author, :body, :title, :year].each {|fn| assert(doc.keys.include?(fn))}

    assert_equal([@ir[0].load, @ir[1].load, @ir[2].load], @ir[0, 3].collect {|d| d.load})
    assert_equal([@ir[61].load, @ir[62].load, @ir[63].load], @ir[61, 100].collect {|d| d.load})
    assert_equal([@ir[0].load, @ir[1].load, @ir[2].load], @ir[0..2].collect {|d| d.load})
    assert_equal([@ir[61].load, @ir[62].load, @ir[63].load], @ir[61..100].collect {|d| d.load})
    assert_equal(@ir[-60], @ir[4])
  end
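
  # Exercises field norms: set_norm updates should be visible via norms() and
  # get_norms_into(), and should survive commit + optimize when read through a
  # fresh reader.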
  def test_ir_norms()
    @ir.set_norm(3, :title, 1)
    @ir.set_norm(3, :body, 12)
    @ir.set_norm(3, :author, 145)
    @ir.set_norm(3, :year, 31)
    @ir.set_norm(3, :text, 202)
    @ir.set_norm(25, :text, 20)
    @ir.set_norm(50, :text, 200)
    @ir.set_norm(63, :text, 155)

    norms = @ir.norms(:text)
    assert_equal(202, norms[ 3])
    assert_equal( 20, norms[25])
    assert_equal(200, norms[50])
    assert_equal(155, norms[63])

    norms = @ir.norms(:title)
    assert_equal(1, norms[3])
    norms = @ir.norms(:body)
    assert_equal(12, norms[3])
    norms = @ir.norms(:author)
    assert_equal(145, norms[3])
    norms = @ir.norms(:year)
    # TODO: this returns two possible results depending on whether it is
    # a multi reader or a segment reader. If it is a multi reader it will
    # always return an empty set of norms, otherwise it will return nil.
    # I'm not sure what to do here just yet or if this is even an issue.
    #assert(norms.nil?)

    norms = " " * 164
    @ir.get_norms_into(:text, norms, 100)
    assert_equal(202, norms[103])
    assert_equal( 20, norms[125])
    assert_equal(200, norms[150])
    assert_equal(155, norms[163])

    @ir.commit()
    iw_optimize()

    ir2 = ir_new()
    norms = " " * 164
    ir2.get_norms_into(:text, norms, 100)
    assert_equal(202, norms[103])
    assert_equal( 20, norms[125])
    assert_equal(200, norms[150])
    assert_equal(155, norms[163])
    ir2.close()
  end
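
  # Deletions are local to a reader until commit: a second reader only sees
  # them after it is reopened, and max_doc only shrinks once the writer
  # optimizes the deleted docs away.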
  def test_ir_delete()
    doc_count = IndexTestHelper::INDEX_TEST_DOCS.size
    @ir.delete(1000) # non-existent doc_num
    assert(! @ir.has_deletions?())
    assert_equal(doc_count, @ir.max_doc())
    assert_equal(doc_count, @ir.num_docs())
    assert(! @ir.deleted?(10))

    [
      [10, doc_count - 1],
      [10, doc_count - 1],
      [doc_count - 1, doc_count - 2],
      [doc_count - 2, doc_count - 3],
    ].each do |del_num, num_docs|
      @ir.delete(del_num)
      assert(@ir.has_deletions?())
      assert_equal(doc_count, @ir.max_doc())
      assert_equal(num_docs, @ir.num_docs())
      assert(@ir.deleted?(del_num))
    end

    @ir.undelete_all()
    assert(! @ir.has_deletions?())
    assert_equal(doc_count, @ir.max_doc())
    assert_equal(doc_count, @ir.num_docs())
    assert(! @ir.deleted?(10))
    assert(! @ir.deleted?(doc_count - 2))
    assert(! @ir.deleted?(doc_count - 1))

    del_list = [10, 20, 30, 40, 50, doc_count - 1]
    del_list.each {|doc_num| @ir.delete(doc_num)}
    assert(@ir.has_deletions?())
    assert_equal(doc_count, @ir.max_doc())
    assert_equal(doc_count - del_list.size, @ir.num_docs())
    del_list.each {|doc_num| assert(@ir.deleted?(doc_num))}

    ir2 = ir_new()
    assert(! ir2.has_deletions?())
    assert_equal(doc_count, ir2.max_doc())
    assert_equal(doc_count, ir2.num_docs())

    @ir.commit()

    assert(! ir2.has_deletions?())
    assert_equal(doc_count, ir2.max_doc())
    assert_equal(doc_count, ir2.num_docs())
    ir2.close

    ir2 = ir_new()
    assert(ir2.has_deletions?())
    assert_equal(doc_count, ir2.max_doc())
    assert_equal(doc_count - 6, ir2.num_docs())
    del_list.each {|doc_num| assert(ir2.deleted?(doc_num))}

    ir2.undelete_all()
    assert(! ir2.has_deletions?())
    assert_equal(doc_count, ir2.max_doc())
    assert_equal(doc_count, ir2.num_docs())
    del_list.each {|doc_num| assert(! ir2.deleted?(doc_num))}
    del_list.each {|doc_num| assert(@ir.deleted?(doc_num))}

    ir2.commit()
    del_list.each {|doc_num| assert(@ir.deleted?(doc_num))}

    del_list.each {|doc_num| ir2.delete(doc_num)}
    ir2.commit()
    iw_optimize()

    ir3 = ir_new()
    assert(!ir3.has_deletions?())
    assert_equal(doc_count - 6, ir3.max_doc())
    assert_equal(doc_count - 6, ir3.num_docs())

    ir2.close()
    ir3.close()
  end

  def test_latest
    assert(@ir.latest?)
    ir2 = ir_new()
    assert(ir2.latest?)

    ir2.delete(0)
    ir2.commit()
    assert(ir2.latest?)
    assert(!@ir.latest?)
    ir2.close()
  end
end
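
# Runs the shared tests against a reader opened on a single RAMDirectory.
# Because the writer is never optimized in setup (see the comment there) and
# :max_buffered_docs is low, the index keeps several segments and a
# multi-segment reader is exercised.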
class MultiReaderTest < Test::Unit::TestCase
  include IndexReaderCommon

  def ir_new
    IndexReader.new(@dir)
  end

  def iw_optimize
    iw = IndexWriter.new(:dir => @dir, :analyzer => WhiteSpaceAnalyzer.new())
    iw.optimize()
    iw.close()
  end

  def setup
    @dir = Ferret::Store::RAMDirectory.new()
    iw = IndexWriter.new(:dir => @dir,
                         :analyzer => WhiteSpaceAnalyzer.new(),
                         :create => true,
                         :field_infos => IndexTestHelper::INDEX_TEST_FIS,
                         :max_buffered_docs => 15)
    IndexTestHelper::INDEX_TEST_DOCS.each {|doc| iw << doc}
    # we mustn't optimize here so that MultiReader is used.
    #iw.optimize() unless self.class == MultiReaderTest
    iw.close()
    @ir = ir_new()
  end

  def teardown()
    @ir.close()
    @dir.close()
  end
end
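
# Reruns the same suite under a different class name; since the optimize call
# in the inherited setup is commented out, it currently exercises the same
# multi-segment index.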
class SegmentReaderTest < MultiReaderTest
end
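
# Builds three separate RAMDirectory indexes and opens them through an array
# of IndexReader instances passed to IndexReader.new.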
class MultiExternalReaderTest < Test::Unit::TestCase
  include IndexReaderCommon

  def ir_new
    readers = @dirs.collect {|dir| IndexReader.new(dir) }
    IndexReader.new(readers)
  end

  def iw_optimize
    @dirs.each do |dir|
      iw = IndexWriter.new(:dir => dir, :analyzer => WhiteSpaceAnalyzer.new())
      iw.optimize()
      iw.close()
    end
  end

  def setup()
    @dirs = []

    [
      [0, 10],
      [10, 30],
      [30, IndexTestHelper::INDEX_TEST_DOCS.size]
    ].each do |start, finish|
      dir = Ferret::Store::RAMDirectory.new()
      @dirs << dir
      iw = IndexWriter.new(:dir => dir,
                           :analyzer => WhiteSpaceAnalyzer.new(),
                           :create => true,
                           :field_infos => IndexTestHelper::INDEX_TEST_FIS)
      (start...finish).each do |doc_id|
        iw << IndexTestHelper::INDEX_TEST_DOCS[doc_id]
      end
      iw.close()
    end
    @ir = ir_new
  end

  def teardown()
    @ir.close()
    @dirs.each {|dir| dir.close}
  end
end
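
# Same three sub-indexes as above, but the reader is opened directly on the
# array of directories.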
class MultiExternalReaderDirTest < Test::Unit::TestCase
  include IndexReaderCommon

  def ir_new
    IndexReader.new(@dirs)
  end

  def iw_optimize
    @dirs.each do |dir|
      iw = IndexWriter.new(:dir => dir, :analyzer => WhiteSpaceAnalyzer.new())
      iw.optimize()
      iw.close()
    end
  end

  def setup()
    @dirs = []

    [
      [0, 10],
      [10, 30],
      [30, IndexTestHelper::INDEX_TEST_DOCS.size]
    ].each do |start, finish|
      dir = Ferret::Store::RAMDirectory.new()
      @dirs << dir
      iw = IndexWriter.new(:dir => dir,
                           :analyzer => WhiteSpaceAnalyzer.new(),
                           :create => true,
                           :field_infos => IndexTestHelper::INDEX_TEST_FIS)
      (start...finish).each do |doc_id|
        iw << IndexTestHelper::INDEX_TEST_DOCS[doc_id]
      end
      iw.close()
    end
    @ir = ir_new
  end

  def teardown()
    @ir.close()
    @dirs.each {|dir| dir.close}
  end
end
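
# Same again, this time opening the reader on an array of filesystem paths.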
class MultiExternalReaderPathTest < Test::Unit::TestCase
  include IndexReaderCommon

  def ir_new
    IndexReader.new(@paths)
  end

  def iw_optimize
    @paths.each do |path|
      iw = IndexWriter.new(:path => path, :analyzer => WhiteSpaceAnalyzer.new())
      iw.optimize()
      iw.close()
    end
  end

  def setup()
    base_dir = File.expand_path(File.join(File.dirname(__FILE__),
                                          '../../temp/multidir'))
    FileUtils.mkdir_p(base_dir)
    @paths = [
      File.join(base_dir, "i1"),
      File.join(base_dir, "i2"),
      File.join(base_dir, "i3")
    ]

    [
      [0, 10],
      [10, 30],
      [30, IndexTestHelper::INDEX_TEST_DOCS.size]
    ].each_with_index do |(start, finish), i|
      path = @paths[i]
      iw = IndexWriter.new(:path => path,
                           :analyzer => WhiteSpaceAnalyzer.new(),
                           :create => true,
                           :field_infos => IndexTestHelper::INDEX_TEST_FIS)
      (start...finish).each do |doc_id|
        iw << IndexTestHelper::INDEX_TEST_DOCS[doc_id]
      end
      iw.close()
    end
    @ir = ir_new
  end

  def teardown()
    @ir.close()
  end
end
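
# Standalone IndexReader tests: multi-value fields, reading while another
# writer optimizes the index, and latest? across writer commits.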
class IndexReaderTest < Test::Unit::TestCase
  include Ferret::Index
  include Ferret::Analysis

  def setup()
    @dir = Ferret::Store::RAMDirectory.new()
  end

  def teardown()
    @dir.close()
  end

  def test_ir_multivalue_fields()
    @fs_dpath = File.expand_path(File.join(File.dirname(__FILE__),
                                           '../../temp/fsdir'))
    @fs_dir = Ferret::Store::FSDirectory.new(@fs_dpath, true)

    iw = IndexWriter.new(:dir => @fs_dir,
                         :analyzer => WhiteSpaceAnalyzer.new(),
                         :create => true)
    doc = {
      :tag => ["Ruby", "C", "Lucene", "Ferret"],
      :body => "this is the body Document Field",
      :title => "this is the title DocField",
      :author => "this is the author field"
    }
    iw << doc
    iw.close()

    @dir = Ferret::Store::RAMDirectory.new(@fs_dir)
    ir = IndexReader.new(@dir)
    assert_equal(doc, ir.get_document(0).load)
    ir.close
  end

  def do_test_term_vectors(ir)
    expected_tv = TermVector.new(:body,
      [
        TVTerm.new("word1", 3, [2, 4, 7]),
        TVTerm.new("word2", 1, [3]),
        TVTerm.new("word3", 4, [0, 5, 8, 9]),
        TVTerm.new("word4", 2, [1, 6])
      ],
      [*(0...10)].collect {|i| TVOffsets.new(i*6, (i+1)*6 - 1)})

    tv = ir.term_vector(3, :body)
    assert_equal(expected_tv, tv)

    tvs = ir.term_vectors(3)
    assert_equal(3, tvs.size)
    assert_equal(expected_tv, tvs[:body])

    tv = tvs[:author]
    assert_equal(:author, tv.field)
    assert_equal([TVTerm.new("Leo", 1, [0]), TVTerm.new("Tolstoy", 1, [1])], tv.terms)
    assert(tv.offsets.nil?)

    tv = tvs[:title]
    assert_equal(:title, tv.field)
    assert_equal([TVTerm.new("War And Peace", 1, nil)], tv.terms)
    assert_equal([TVOffsets.new(0, 13)], tv.offsets)
  end

  def do_test_ir_read_while_optimizing(dir)
    iw = IndexWriter.new(:dir => dir,
                         :analyzer => WhiteSpaceAnalyzer.new(),
                         :create => true,
                         :field_infos => IndexTestHelper::INDEX_TEST_FIS)
    IndexTestHelper::INDEX_TEST_DOCS.each {|doc| iw << doc}
    iw.close()

    ir = IndexReader.new(dir)
    do_test_term_vectors(ir)

    iw = IndexWriter.new(:dir => dir, :analyzer => WhiteSpaceAnalyzer.new())
    iw.optimize()
    iw.close()

    do_test_term_vectors(ir)
    ir.close()
  end

  def test_ir_read_while_optimizing()
    do_test_ir_read_while_optimizing(@dir)
  end

  def test_ir_read_while_optimizing_on_disk()
    dpath = File.expand_path(File.join(File.dirname(__FILE__),
                                       '../../temp/fsdir'))
    fs_dir = Ferret::Store::FSDirectory.new(dpath, true)
    do_test_ir_read_while_optimizing(fs_dir)
    fs_dir.close()
  end

  def test_latest()
    dpath = File.expand_path(File.join(File.dirname(__FILE__),
                                       '../../temp/fsdir'))
    fs_dir = Ferret::Store::FSDirectory.new(dpath, true)

    iw = IndexWriter.new(:dir => fs_dir,
                         :analyzer => WhiteSpaceAnalyzer.new(),
                         :create => true)
    iw << {:field => "content"}
    iw.close()

    ir = IndexReader.new(fs_dir)
    assert(ir.latest?)

    iw = IndexWriter.new(:dir => fs_dir, :analyzer => WhiteSpaceAnalyzer.new())
    iw << {:field => "content2"}
    iw.close()

    assert(!ir.latest?)
    ir.close()

    ir = IndexReader.new(fs_dir)
    assert(ir.latest?)
    ir.close()
  end
end