
/spec/adapters/postgres_spec.rb

https://github.com/lee-dohm/sequel
Ruby | 2568 lines | 2282 code | 269 blank | 17 comment | 633 complexity | 2041a41019f488fdc497dd93532a1451 MD5


  1. require File.join(File.dirname(File.expand_path(__FILE__)), 'spec_helper.rb')
  2. unless defined?(POSTGRES_DB)
  3. POSTGRES_URL = 'postgres://postgres:postgres@localhost:5432/reality_spec' unless defined? POSTGRES_URL
  4. POSTGRES_DB = Sequel.connect(ENV['SEQUEL_PG_SPEC_DB']||POSTGRES_URL)
  5. end
  6. INTEGRATION_DB = POSTGRES_DB unless defined?(INTEGRATION_DB)
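# A minimal usage sketch, assuming a local PostgreSQL server and that the specs are
# run through RSpec: the connection above prefers the SEQUEL_PG_SPEC_DB environment
# variable and only falls back to the hard-coded POSTGRES_URL, so the target database
# can be overridden per run, e.g. (hypothetical credentials and database name):
#   SEQUEL_PG_SPEC_DB=postgres://user:secret@localhost:5432/sequel_test rspec spec/adapters/postgres_spec.rb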
  7. # Automatic parameterization changes the SQL used, so don't check
  8. # for expected SQL if it is being used.
  9. if defined?(Sequel::Postgres::AutoParameterize)
  10. check_sqls = false
  11. else
  12. check_sqls = true
  13. end
  14. def POSTGRES_DB.sqls
  15. (@sqls ||= [])
  16. end
  17. logger = Object.new
  18. def logger.method_missing(m, msg)
  19. POSTGRES_DB.sqls << msg
  20. end
  21. POSTGRES_DB.loggers << logger
  22. #POSTGRES_DB.instance_variable_set(:@server_version, 80200)
  23. POSTGRES_DB.create_table! :test do
  24. text :name
  25. integer :value, :index => true
  26. end
  27. POSTGRES_DB.create_table! :test2 do
  28. text :name
  29. integer :value
  30. end
  31. POSTGRES_DB.create_table! :test3 do
  32. integer :value
  33. timestamp :time
  34. end
  35. POSTGRES_DB.create_table! :test4 do
  36. varchar :name, :size => 20
  37. bytea :value
  38. end
  39. describe "A PostgreSQL database" do
  40. before do
  41. @db = POSTGRES_DB
  42. end
  43. specify "should provide the server version" do
  44. @db.server_version.should > 70000
  45. end
  46. specify "should correctly parse the schema" do
  47. @db.schema(:test3, :reload=>true).should == [
  48. [:value, {:oid=>23, :type=>:integer, :allow_null=>true, :default=>nil, :ruby_default=>nil, :db_type=>"integer", :primary_key=>false}],
  49. [:time, {:oid=>1114, :type=>:datetime, :allow_null=>true, :default=>nil, :ruby_default=>nil, :db_type=>"timestamp without time zone", :primary_key=>false}]
  50. ]
  51. @db.schema(:test4, :reload=>true).should == [
  52. [:name, {:oid=>1043, :type=>:string, :allow_null=>true, :default=>nil, :ruby_default=>nil, :db_type=>"character varying(20)", :primary_key=>false}],
  53. [:value, {:oid=>17, :type=>:blob, :allow_null=>true, :default=>nil, :ruby_default=>nil, :db_type=>"bytea", :primary_key=>false}]
  54. ]
  55. end
  56. specify "should parse foreign keys for tables in a schema" do
  57. begin
  58. @db.create_table!(:public__testfk){primary_key :id; foreign_key :i, :public__testfk}
  59. @db.foreign_key_list(:public__testfk).should == [{:on_delete=>:no_action, :on_update=>:no_action, :columns=>[:i], :key=>[:id], :deferrable=>false, :table=>Sequel.qualify(:public, :testfk), :name=>:testfk_i_fkey}]
  60. ensure
  61. @db.drop_table(:public__testfk)
  62. end
  63. end
  64. specify "should return uuid fields as strings" do
  65. @db.get(Sequel.cast('550e8400-e29b-41d4-a716-446655440000', :uuid)).should == '550e8400-e29b-41d4-a716-446655440000'
  66. end
  67. end
  68. describe "A PostgreSQL dataset" do
  69. before do
  70. @d = POSTGRES_DB[:test]
  71. @d.delete
  72. POSTGRES_DB.sqls.clear
  73. end
  74. specify "should quote columns and tables using double quotes if quoting identifiers" do
  75. @d.select(:name).sql.should == 'SELECT "name" FROM "test"'
  76. @d.select(Sequel.lit('COUNT(*)')).sql.should == 'SELECT COUNT(*) FROM "test"'
  77. @d.select(Sequel.function(:max, :value)).sql.should == 'SELECT max("value") FROM "test"'
  78. @d.select(Sequel.function(:NOW)).sql.should == 'SELECT NOW() FROM "test"'
  79. @d.select(Sequel.function(:max, :items__value)).sql.should == 'SELECT max("items"."value") FROM "test"'
  80. @d.order(Sequel.desc(:name)).sql.should == 'SELECT * FROM "test" ORDER BY "name" DESC'
  81. @d.select(Sequel.lit('test.name AS item_name')).sql.should == 'SELECT test.name AS item_name FROM "test"'
  82. @d.select(Sequel.lit('"name"')).sql.should == 'SELECT "name" FROM "test"'
  83. @d.select(Sequel.lit('max(test."name") AS "max_name"')).sql.should == 'SELECT max(test."name") AS "max_name" FROM "test"'
  84. @d.insert_sql(:x => :y).should =~ /\AINSERT INTO "test" \("x"\) VALUES \("y"\)( RETURNING NULL)?\z/
  85. if check_sqls
  86. @d.select(Sequel.function(:test, :abc, 'hello')).sql.should == "SELECT test(\"abc\", 'hello') FROM \"test\""
  87. @d.select(Sequel.function(:test, :abc__def, 'hello')).sql.should == "SELECT test(\"abc\".\"def\", 'hello') FROM \"test\""
  88. @d.select(Sequel.function(:test, :abc__def, 'hello').as(:x2)).sql.should == "SELECT test(\"abc\".\"def\", 'hello') AS \"x2\" FROM \"test\""
  89. @d.insert_sql(:value => 333).should =~ /\AINSERT INTO "test" \("value"\) VALUES \(333\)( RETURNING NULL)?\z/
  90. end
  91. end
  92. specify "should quote fields correctly when reversing the order if quoting identifiers" do
  93. @d.reverse_order(:name).sql.should == 'SELECT * FROM "test" ORDER BY "name" DESC'
  94. @d.reverse_order(Sequel.desc(:name)).sql.should == 'SELECT * FROM "test" ORDER BY "name" ASC'
  95. @d.reverse_order(:name, Sequel.desc(:test)).sql.should == 'SELECT * FROM "test" ORDER BY "name" DESC, "test" ASC'
  96. @d.reverse_order(Sequel.desc(:name), :test).sql.should == 'SELECT * FROM "test" ORDER BY "name" ASC, "test" DESC'
  97. end
  98. specify "should support regexps" do
  99. @d << {:name => 'abc', :value => 1}
  100. @d << {:name => 'bcd', :value => 2}
  101. @d.filter(:name => /bc/).count.should == 2
  102. @d.filter(:name => /^bc/).count.should == 1
  103. end
  104. specify "should support NULLS FIRST and NULLS LAST" do
  105. @d << {:name => 'abc'}
  106. @d << {:name => 'bcd'}
  107. @d << {:name => 'bcd', :value => 2}
  108. @d.order(Sequel.asc(:value, :nulls=>:first), :name).select_map(:name).should == %w[abc bcd bcd]
  109. @d.order(Sequel.asc(:value, :nulls=>:last), :name).select_map(:name).should == %w[bcd abc bcd]
  110. @d.order(Sequel.asc(:value, :nulls=>:first), :name).reverse.select_map(:name).should == %w[bcd bcd abc]
  111. end
  112. specify "#lock should lock tables and yield if a block is given" do
  113. @d.lock('EXCLUSIVE'){@d.insert(:name=>'a')}
  114. end
  115. specify "should support exclusion constraints when creating or altering tables" do
  116. begin
  117. @db = POSTGRES_DB
  118. @db.create_table!(:atest){Integer :t; exclude [[Sequel.desc(:t, :nulls=>:last), '=']], :using=>:btree, :where=>proc{t > 0}}
  119. @db[:atest].insert(1)
  120. @db[:atest].insert(2)
  121. proc{@db[:atest].insert(2)}.should raise_error(Sequel::DatabaseError)
  122. @db.create_table!(:atest){Integer :t}
  123. @db.alter_table(:atest){add_exclusion_constraint [[:t, '=']], :using=>:btree, :name=>'atest_ex'}
  124. @db[:atest].insert(1)
  125. @db[:atest].insert(2)
  126. proc{@db[:atest].insert(2)}.should raise_error(Sequel::DatabaseError)
  127. @db.alter_table(:atest){drop_constraint 'atest_ex'}
  128. ensure
  129. @db.drop_table?(:atest)
  130. end
  131. end if POSTGRES_DB.server_version >= 90000
  132. specify "should support adding foreign key constarints that are not yet valid, and validating them later" do
  133. begin
  134. @db = POSTGRES_DB
  135. @db.create_table!(:atest){primary_key :id; Integer :fk}
  136. @db[:atest].insert(1, 5)
  137. @db.alter_table(:atest){add_foreign_key [:fk], :atest, :not_valid=>true, :name=>:atest_fk}
  138. @db[:atest].insert(2, 1)
  139. proc{@db[:atest].insert(3, 4)}.should raise_error(Sequel::DatabaseError)
  140. proc{@db.alter_table(:atest){validate_constraint :atest_fk}}.should raise_error(Sequel::DatabaseError)
  141. @db[:atest].where(:id=>1).update(:fk=>2)
  142. @db.alter_table(:atest){validate_constraint :atest_fk}
  143. proc{@db.alter_table(:atest){validate_constraint :atest_fk}}.should_not raise_error
  144. ensure
  145. @db.drop_table?(:atest)
  146. end
  147. end if POSTGRES_DB.server_version >= 90200
  148. specify "should support :using when altering a column's type" do
  149. begin
  150. @db = POSTGRES_DB
  151. @db.create_table!(:atest){Integer :t}
  152. @db[:atest].insert(1262304000)
  153. @db.alter_table(:atest){set_column_type :t, Time, :using=>Sequel.cast('epoch', Time) + Sequel.cast('1 second', :interval) * :t}
  154. @db[:atest].get(Sequel.extract(:year, :t)).should == 2010
  155. ensure
  156. @db.drop_table?(:atest)
  157. end
  158. end
  159. specify "should support :using with a string when altering a column's type" do
  160. begin
  161. @db = POSTGRES_DB
  162. @db.create_table!(:atest){Integer :t}
  163. @db[:atest].insert(1262304000)
  164. @db.alter_table(:atest){set_column_type :t, Time, :using=>"'epoch'::timestamp + '1 second'::interval * t"}
  165. @db[:atest].get(Sequel.extract(:year, :t)).should == 2010
  166. ensure
  167. @db.drop_table?(:atest)
  168. end
  169. end
  170. specify "should have #transaction support various types of synchronous options" do
  171. @db = POSTGRES_DB
  172. @db.transaction(:synchronous=>:on){}
  173. @db.transaction(:synchronous=>true){}
  174. @db.transaction(:synchronous=>:off){}
  175. @db.transaction(:synchronous=>false){}
  176. @db.sqls.grep(/synchronous/).should == ["SET LOCAL synchronous_commit = on", "SET LOCAL synchronous_commit = on", "SET LOCAL synchronous_commit = off", "SET LOCAL synchronous_commit = off"]
  177. @db.sqls.clear
  178. @db.transaction(:synchronous=>nil){}
  179. @db.sqls.should == ['BEGIN', 'COMMIT']
  180. if @db.server_version >= 90100
  181. @db.sqls.clear
  182. @db.transaction(:synchronous=>:local){}
  183. @db.sqls.grep(/synchronous/).should == ["SET LOCAL synchronous_commit = local"]
  184. if @db.server_version >= 90200
  185. @db.sqls.clear
  186. @db.transaction(:synchronous=>:remote_write){}
  187. @db.sqls.grep(/synchronous/).should == ["SET LOCAL synchronous_commit = remote_write"]
  188. end
  189. end
  190. end
  191. specify "should have #transaction support read only transactions" do
  192. @db = POSTGRES_DB
  193. @db.transaction(:read_only=>true){}
  194. @db.transaction(:read_only=>false){}
  195. @db.transaction(:isolation=>:serializable, :read_only=>true){}
  196. @db.transaction(:isolation=>:serializable, :read_only=>false){}
  197. @db.sqls.grep(/READ/).should == ["SET TRANSACTION READ ONLY", "SET TRANSACTION READ WRITE", "SET TRANSACTION ISOLATION LEVEL SERIALIZABLE READ ONLY", "SET TRANSACTION ISOLATION LEVEL SERIALIZABLE READ WRITE"]
  198. end
  199. specify "should have #transaction support deferrable transactions" do
  200. @db = POSTGRES_DB
  201. @db.transaction(:deferrable=>true){}
  202. @db.transaction(:deferrable=>false){}
  203. @db.transaction(:deferrable=>true, :read_only=>true){}
  204. @db.transaction(:deferrable=>false, :read_only=>false){}
  205. @db.transaction(:isolation=>:serializable, :deferrable=>true, :read_only=>true){}
  206. @db.transaction(:isolation=>:serializable, :deferrable=>false, :read_only=>false){}
  207. @db.sqls.grep(/DEF/).should == ["SET TRANSACTION DEFERRABLE", "SET TRANSACTION NOT DEFERRABLE", "SET TRANSACTION READ ONLY DEFERRABLE", "SET TRANSACTION READ WRITE NOT DEFERRABLE", "SET TRANSACTION ISOLATION LEVEL SERIALIZABLE READ ONLY DEFERRABLE", "SET TRANSACTION ISOLATION LEVEL SERIALIZABLE READ WRITE NOT DEFERRABLE"]
  208. end if POSTGRES_DB.server_version >= 90100
  209. specify "should support creating indexes concurrently" do
  210. POSTGRES_DB.sqls.clear
  211. POSTGRES_DB.add_index :test, [:name, :value], :concurrently=>true
  212. POSTGRES_DB.sqls.should == ['CREATE INDEX CONCURRENTLY "test_name_value_index" ON "test" ("name", "value")'] if check_sqls
  213. end
  214. specify "should support dropping indexes only if they already exist" do
  215. POSTGRES_DB.add_index :test, [:name, :value], :name=>'tnv1'
  216. POSTGRES_DB.sqls.clear
  217. POSTGRES_DB.drop_index :test, [:name, :value], :if_exists=>true, :name=>'tnv1'
  218. POSTGRES_DB.sqls.should == ['DROP INDEX IF EXISTS "tnv1"']
  219. end
  220. specify "should support CASCADE when dropping indexes" do
  221. POSTGRES_DB.add_index :test, [:name, :value], :name=>'tnv2'
  222. POSTGRES_DB.sqls.clear
  223. POSTGRES_DB.drop_index :test, [:name, :value], :cascade=>true, :name=>'tnv2'
  224. POSTGRES_DB.sqls.should == ['DROP INDEX "tnv2" CASCADE']
  225. end
  226. specify "should support dropping indexes concurrently" do
  227. POSTGRES_DB.add_index :test, [:name, :value], :name=>'tnv2'
  228. POSTGRES_DB.sqls.clear
  229. POSTGRES_DB.drop_index :test, [:name, :value], :concurrently=>true, :name=>'tnv2'
  230. POSTGRES_DB.sqls.should == ['DROP INDEX CONCURRENTLY "tnv2"']
  231. end if POSTGRES_DB.server_version >= 90200
  232. specify "#lock should lock table if inside a transaction" do
  233. POSTGRES_DB.transaction{@d.lock('EXCLUSIVE'); @d.insert(:name=>'a')}
  234. end
  235. specify "#lock should return nil" do
  236. @d.lock('EXCLUSIVE'){@d.insert(:name=>'a')}.should == nil
  237. POSTGRES_DB.transaction{@d.lock('EXCLUSIVE').should == nil; @d.insert(:name=>'a')}
  238. end
  239. specify "should raise an error if attempting to update a joined dataset with a single FROM table" do
  240. proc{POSTGRES_DB[:test].join(:test2, [:name]).update(:name=>'a')}.should raise_error(Sequel::Error, 'Need multiple FROM tables if updating/deleting a dataset with JOINs')
  241. end
  242. specify "should truncate with options" do
  243. @d << { :name => 'abc', :value => 1}
  244. @d.count.should == 1
  245. @d.truncate(:cascade => true)
  246. @d.count.should == 0
  247. if @d.db.server_version > 80400
  248. @d << { :name => 'abc', :value => 1}
  249. @d.truncate(:cascade => true, :only=>true, :restart=>true)
  250. @d.count.should == 0
  251. end
  252. end
  253. specify "should truncate multiple tables at once" do
  254. tables = [:test, :test2, :test3, :test4]
  255. tables.each{|t| @d.from(t).insert}
  256. @d.from(:test, :test2, :test3, :test4).truncate
  257. tables.each{|t| @d.from(t).count.should == 0}
  258. end
  259. end
  260. describe "Dataset#distinct" do
  261. before do
  262. @db = POSTGRES_DB
  263. @db.create_table!(:a) do
  264. Integer :a
  265. Integer :b
  266. end
  267. @ds = @db[:a]
  268. end
  269. after do
  270. @db.drop_table?(:a)
  271. end
  272. it "#distinct with arguments should return results distinct on those arguments" do
  273. @ds.insert(20, 10)
  274. @ds.insert(30, 10)
  275. @ds.order(:b, :a).distinct.map(:a).should == [20, 30]
  276. @ds.order(:b, Sequel.desc(:a)).distinct.map(:a).should == [30, 20]
  277. @ds.order(:b, :a).distinct(:b).map(:a).should == [20]
  278. @ds.order(:b, Sequel.desc(:a)).distinct(:b).map(:a).should == [30]
  279. end
  280. end
  281. if POSTGRES_DB.pool.respond_to?(:max_size) and POSTGRES_DB.pool.max_size > 1
  282. describe "Dataset#for_update support" do
  283. before do
  284. @db = POSTGRES_DB.create_table!(:items) do
  285. primary_key :id
  286. Integer :number
  287. String :name
  288. end
  289. @ds = POSTGRES_DB[:items]
  290. end
  291. after do
  292. POSTGRES_DB.drop_table?(:items)
  293. POSTGRES_DB.disconnect
  294. end
  295. specify "should handle FOR UPDATE" do
  296. @ds.insert(:number=>20)
  297. c, t = nil, nil
  298. q = Queue.new
  299. POSTGRES_DB.transaction do
  300. @ds.for_update.first(:id=>1)
  301. t = Thread.new do
  302. POSTGRES_DB.transaction do
  303. q.push nil
  304. @ds.filter(:id=>1).update(:name=>'Jim')
  305. c = @ds.first(:id=>1)
  306. q.push nil
  307. end
  308. end
  309. q.pop
  310. @ds.filter(:id=>1).update(:number=>30)
  311. end
  312. q.pop
  313. t.join
  314. c.should == {:id=>1, :number=>30, :name=>'Jim'}
  315. end
  316. specify "should handle FOR SHARE" do
  317. @ds.insert(:number=>20)
  318. c, t = nil
  319. q = Queue.new
  320. POSTGRES_DB.transaction do
  321. @ds.for_share.first(:id=>1)
  322. t = Thread.new do
  323. POSTGRES_DB.transaction do
  324. c = @ds.for_share.filter(:id=>1).first
  325. q.push nil
  326. end
  327. end
  328. q.pop
  329. @ds.filter(:id=>1).update(:name=>'Jim')
  330. c.should == {:id=>1, :number=>20, :name=>nil}
  331. end
  332. t.join
  333. end
  334. end
  335. end
  336. describe "A PostgreSQL dataset with a timestamp field" do
  337. before do
  338. @db = POSTGRES_DB
  339. @d = @db[:test3]
  340. @d.delete
  341. end
  342. after do
  343. @db.convert_infinite_timestamps = false if @db.adapter_scheme == :postgres
  344. end
  345. cspecify "should store milliseconds in time fields for Time objects", :do, :swift do
  346. t = Time.now
  347. @d << {:value=>1, :time=>t}
  348. t2 = @d[:value =>1][:time]
  349. @d.literal(t2).should == @d.literal(t)
  350. t2.strftime('%Y-%m-%d %H:%M:%S').should == t.strftime('%Y-%m-%d %H:%M:%S')
  351. (t2.is_a?(Time) ? t2.usec : t2.strftime('%N').to_i/1000).should == t.usec
  352. end
  353. cspecify "should store milliseconds in time fields for DateTime objects", :do, :swift do
  354. t = DateTime.now
  355. @d << {:value=>1, :time=>t}
  356. t2 = @d[:value =>1][:time]
  357. @d.literal(t2).should == @d.literal(t)
  358. t2.strftime('%Y-%m-%d %H:%M:%S').should == t.strftime('%Y-%m-%d %H:%M:%S')
  359. (t2.is_a?(Time) ? t2.usec : t2.strftime('%N').to_i/1000).should == t.strftime('%N').to_i/1000
  360. end
  361. if POSTGRES_DB.adapter_scheme == :postgres
  362. specify "should handle infinite timestamps if convert_infinite_timestamps is set" do
  363. @d << {:time=>Sequel.cast('infinity', :timestamp)}
  364. @db.convert_infinite_timestamps = :nil
  365. @db[:test3].get(:time).should == nil
  366. @db.convert_infinite_timestamps = :string
  367. @db[:test3].get(:time).should == 'infinity'
  368. @db.convert_infinite_timestamps = :float
  369. @db[:test3].get(:time).should == 1.0/0.0
  370. @d.update(:time=>Sequel.cast('-infinity', :timestamp))
  371. @db.convert_infinite_timestamps = :nil
  372. @db[:test3].get(:time).should == nil
  373. @db.convert_infinite_timestamps = :string
  374. @db[:test3].get(:time).should == '-infinity'
  375. @db.convert_infinite_timestamps = :float
  376. @db[:test3].get(:time).should == -1.0/0.0
  377. end
  378. specify "should handle conversions from infinite strings/floats in models" do
  379. c = Class.new(Sequel::Model(:test3))
  380. @db.convert_infinite_timestamps = :float
  381. c.new(:time=>'infinity').time.should == 'infinity'
  382. c.new(:time=>'-infinity').time.should == '-infinity'
  383. c.new(:time=>1.0/0.0).time.should == 1.0/0.0
  384. c.new(:time=>-1.0/0.0).time.should == -1.0/0.0
  385. end
  386. end
  387. end
  388. describe "PostgreSQL's EXPLAIN and ANALYZE" do
  389. specify "should not raise errors" do
  390. @d = POSTGRES_DB[:test3]
  391. proc{@d.explain}.should_not raise_error
  392. proc{@d.analyze}.should_not raise_error
  393. end
  394. end
  395. describe "A PostgreSQL database" do
  396. before do
  397. @db = POSTGRES_DB
  398. end
  399. specify "should support column operations" do
  400. @db.create_table!(:test2){text :name; integer :value}
  401. @db[:test2] << {}
  402. @db[:test2].columns.should == [:name, :value]
  403. @db.add_column :test2, :xyz, :text, :default => '000'
  404. @db[:test2].columns.should == [:name, :value, :xyz]
  405. @db[:test2] << {:name => 'mmm', :value => 111}
  406. @db[:test2].first[:xyz].should == '000'
  407. @db[:test2].columns.should == [:name, :value, :xyz]
  408. @db.drop_column :test2, :xyz
  409. @db[:test2].columns.should == [:name, :value]
  410. @db[:test2].delete
  411. @db.add_column :test2, :xyz, :text, :default => '000'
  412. @db[:test2] << {:name => 'mmm', :value => 111, :xyz => 'qqqq'}
  413. @db[:test2].columns.should == [:name, :value, :xyz]
  414. @db.rename_column :test2, :xyz, :zyx
  415. @db[:test2].columns.should == [:name, :value, :zyx]
  416. @db[:test2].first[:zyx].should == 'qqqq'
  417. @db.add_column :test2, :xyz, :float
  418. @db[:test2].delete
  419. @db[:test2] << {:name => 'mmm', :value => 111, :xyz => 56.78}
  420. @db.set_column_type :test2, :xyz, :integer
  421. @db[:test2].first[:xyz].should == 57
  422. end
  423. specify "#locks should be a dataset returning database locks " do
  424. @db.locks.should be_a_kind_of(Sequel::Dataset)
  425. @db.locks.all.should be_a_kind_of(Array)
  426. end
  427. end
  428. describe "A PostgreSQL database" do
  429. before do
  430. @db = POSTGRES_DB
  431. @db.drop_table?(:posts)
  432. @db.sqls.clear
  433. end
  434. after do
  435. @db.drop_table?(:posts)
  436. end
  437. specify "should support resetting the primary key sequence" do
  438. @db.create_table(:posts){primary_key :a}
  439. @db[:posts].insert(:a=>20).should == 20
  440. @db[:posts].insert.should == 1
  441. @db[:posts].insert.should == 2
  442. @db[:posts].insert(:a=>10).should == 10
  443. @db.reset_primary_key_sequence(:posts).should == 21
  444. @db[:posts].insert.should == 21
  445. @db[:posts].order(:a).map(:a).should == [1, 2, 10, 20, 21]
  446. end
  447. specify "should support specifying Integer/Bignum/Fixnum types in primary keys and have them be auto incrementing" do
  448. @db.create_table(:posts){primary_key :a, :type=>Integer}
  449. @db[:posts].insert.should == 1
  450. @db[:posts].insert.should == 2
  451. @db.create_table!(:posts){primary_key :a, :type=>Fixnum}
  452. @db[:posts].insert.should == 1
  453. @db[:posts].insert.should == 2
  454. @db.create_table!(:posts){primary_key :a, :type=>Bignum}
  455. @db[:posts].insert.should == 1
  456. @db[:posts].insert.should == 2
  457. end
  458. specify "should not raise an error if attempting to resetting the primary key sequence for a table without a primary key" do
  459. @db.create_table(:posts){Integer :a}
  460. @db.reset_primary_key_sequence(:posts).should == nil
  461. end
  462. specify "should support opclass specification" do
  463. @db.create_table(:posts){text :title; text :body; integer :user_id; index(:user_id, :opclass => :int4_ops, :type => :btree)}
  464. @db.sqls.should == [
  465. 'CREATE TABLE "posts" ("title" text, "body" text, "user_id" integer)',
  466. 'CREATE INDEX "posts_user_id_index" ON "posts" USING btree ("user_id" int4_ops)'
  467. ]
  468. end
  469. specify "should support fulltext indexes and searching" do
  470. @db.create_table(:posts){text :title; text :body; full_text_index [:title, :body]; full_text_index :title, :language => 'french'}
  471. @db.sqls.should == [
  472. %{CREATE TABLE "posts" ("title" text, "body" text)},
  473. %{CREATE INDEX "posts_title_body_index" ON "posts" USING gin (to_tsvector('simple'::regconfig, (COALESCE("title", '') || ' ' || COALESCE("body", ''))))},
  474. %{CREATE INDEX "posts_title_index" ON "posts" USING gin (to_tsvector('french'::regconfig, (COALESCE("title", ''))))}
  475. ] if check_sqls
  476. @db[:posts].insert(:title=>'ruby rails', :body=>'yowsa')
  477. @db[:posts].insert(:title=>'sequel', :body=>'ruby')
  478. @db[:posts].insert(:title=>'ruby scooby', :body=>'x')
  479. @db.sqls.clear
  480. @db[:posts].full_text_search(:title, 'rails').all.should == [{:title=>'ruby rails', :body=>'yowsa'}]
  481. @db[:posts].full_text_search([:title, :body], ['yowsa', 'rails']).all.should == [{:title=>'ruby rails', :body=>'yowsa'}]
  482. @db[:posts].full_text_search(:title, 'scooby', :language => 'french').all.should == [{:title=>'ruby scooby', :body=>'x'}]
  483. @db.sqls.should == [
  484. %{SELECT * FROM "posts" WHERE (to_tsvector('simple'::regconfig, (COALESCE("title", ''))) @@ to_tsquery('simple'::regconfig, 'rails'))},
  485. %{SELECT * FROM "posts" WHERE (to_tsvector('simple'::regconfig, (COALESCE("title", '') || ' ' || COALESCE("body", ''))) @@ to_tsquery('simple'::regconfig, 'yowsa | rails'))},
  486. %{SELECT * FROM "posts" WHERE (to_tsvector('french'::regconfig, (COALESCE("title", ''))) @@ to_tsquery('french'::regconfig, 'scooby'))}] if check_sqls
  487. @db[:posts].full_text_search(:title, :$n).call(:select, :n=>'rails').should == [{:title=>'ruby rails', :body=>'yowsa'}]
  488. @db[:posts].full_text_search(:title, :$n).prepare(:select, :fts_select).call(:n=>'rails').should == [{:title=>'ruby rails', :body=>'yowsa'}]
  489. end
  490. specify "should support spatial indexes" do
  491. @db.create_table(:posts){box :geom; spatial_index [:geom]}
  492. @db.sqls.should == [
  493. 'CREATE TABLE "posts" ("geom" box)',
  494. 'CREATE INDEX "posts_geom_index" ON "posts" USING gist ("geom")'
  495. ]
  496. end
  497. specify "should support indexes with index type" do
  498. @db.create_table(:posts){varchar :title, :size => 5; index :title, :type => 'hash'}
  499. @db.sqls.should == [
  500. 'CREATE TABLE "posts" ("title" varchar(5))',
  501. 'CREATE INDEX "posts_title_index" ON "posts" USING hash ("title")'
  502. ]
  503. end
  504. specify "should support unique indexes with index type" do
  505. @db.create_table(:posts){varchar :title, :size => 5; index :title, :type => 'btree', :unique => true}
  506. @db.sqls.should == [
  507. 'CREATE TABLE "posts" ("title" varchar(5))',
  508. 'CREATE UNIQUE INDEX "posts_title_index" ON "posts" USING btree ("title")'
  509. ]
  510. end
  511. specify "should support partial indexes" do
  512. @db.create_table(:posts){varchar :title, :size => 5; index :title, :where => {:title => '5'}}
  513. @db.sqls.should == [
  514. 'CREATE TABLE "posts" ("title" varchar(5))',
  515. 'CREATE INDEX "posts_title_index" ON "posts" ("title") WHERE ("title" = \'5\')'
  516. ]
  517. end
  518. specify "should support identifiers for table names in indicies" do
  519. @db.create_table(Sequel::SQL::Identifier.new(:posts)){varchar :title, :size => 5; index :title, :where => {:title => '5'}}
  520. @db.sqls.should == [
  521. 'CREATE TABLE "posts" ("title" varchar(5))',
  522. 'CREATE INDEX "posts_title_index" ON "posts" ("title") WHERE ("title" = \'5\')'
  523. ]
  524. end
  525. specify "should support renaming tables" do
  526. @db.create_table!(:posts1){primary_key :a}
  527. @db.rename_table(:posts1, :posts)
  528. end
  529. end
  530. describe "Postgres::Dataset#import" do
  531. before do
  532. @db = POSTGRES_DB
  533. @db.create_table!(:test){primary_key :x; Integer :y}
  534. @db.sqls.clear
  535. @ds = @db[:test]
  536. end
  537. after do
  538. @db.drop_table?(:test)
  539. end
  540. specify "#import should a single insert statement" do
  541. @ds.import([:x, :y], [[1, 2], [3, 4]])
  542. @db.sqls.should == ['BEGIN', 'INSERT INTO "test" ("x", "y") VALUES (1, 2), (3, 4)', 'COMMIT']
  543. @ds.all.should == [{:x=>1, :y=>2}, {:x=>3, :y=>4}]
  544. end
  545. specify "#import should work correctly when returning primary keys" do
  546. @ds.import([:x, :y], [[1, 2], [3, 4]], :return=>:primary_key).should == [1, 3]
  547. @ds.all.should == [{:x=>1, :y=>2}, {:x=>3, :y=>4}]
  548. end
  549. specify "#import should work correctly when returning primary keys with :slice option" do
  550. @ds.import([:x, :y], [[1, 2], [3, 4]], :return=>:primary_key, :slice=>1).should == [1, 3]
  551. @ds.all.should == [{:x=>1, :y=>2}, {:x=>3, :y=>4}]
  552. end
  553. specify "#import should work correctly with an arbitrary returning value" do
  554. @ds.returning(:y, :x).import([:x, :y], [[1, 2], [3, 4]]).should == [{:y=>2, :x=>1}, {:y=>4, :x=>3}]
  555. @ds.all.should == [{:x=>1, :y=>2}, {:x=>3, :y=>4}]
  556. end
  557. end
  558. describe "Postgres::Dataset#insert" do
  559. before do
  560. @db = POSTGRES_DB
  561. @db.create_table!(:test5){primary_key :xid; Integer :value}
  562. @db.sqls.clear
  563. @ds = @db[:test5]
  564. end
  565. after do
  566. @db.drop_table?(:test5)
  567. end
  568. specify "should work with static SQL" do
  569. @ds.with_sql('INSERT INTO test5 (value) VALUES (10)').insert.should == nil
  570. @db['INSERT INTO test5 (value) VALUES (20)'].insert.should == nil
  571. @ds.all.should == [{:xid=>1, :value=>10}, {:xid=>2, :value=>20}]
  572. end
  573. specify "should insert correctly if using a column array and a value array" do
  574. @ds.insert([:value], [10]).should == 1
  575. @ds.all.should == [{:xid=>1, :value=>10}]
  576. end
  577. specify "should use INSERT RETURNING" do
  578. @ds.insert(:value=>10).should == 1
  579. @db.sqls.last.should == 'INSERT INTO "test5" ("value") VALUES (10) RETURNING "xid"' if check_sqls
  580. end
  581. specify "should have insert_select insert the record and return the inserted record" do
  582. h = @ds.insert_select(:value=>10)
  583. h[:value].should == 10
  584. @ds.first(:xid=>h[:xid])[:value].should == 10
  585. end
  586. specify "should correctly return the inserted record's primary key value" do
  587. value1 = 10
  588. id1 = @ds.insert(:value=>value1)
  589. @ds.first(:xid=>id1)[:value].should == value1
  590. value2 = 20
  591. id2 = @ds.insert(:value=>value2)
  592. @ds.first(:xid=>id2)[:value].should == value2
  593. end
  594. specify "should return nil if the table has no primary key" do
  595. ds = POSTGRES_DB[:test4]
  596. ds.delete
  597. ds.insert(:name=>'a').should == nil
  598. end
  599. end
  600. describe "Postgres::Database schema qualified tables" do
  601. before do
  602. POSTGRES_DB << "CREATE SCHEMA schema_test"
  603. POSTGRES_DB.instance_variable_set(:@primary_keys, {})
  604. POSTGRES_DB.instance_variable_set(:@primary_key_sequences, {})
  605. end
  606. after do
  607. POSTGRES_DB << "DROP SCHEMA schema_test CASCADE"
  608. POSTGRES_DB.default_schema = nil
  609. end
  610. specify "should be able to create, drop, select and insert into tables in a given schema" do
  611. POSTGRES_DB.create_table(:schema_test__schema_test){primary_key :i}
  612. POSTGRES_DB[:schema_test__schema_test].first.should == nil
  613. POSTGRES_DB[:schema_test__schema_test].insert(:i=>1).should == 1
  614. POSTGRES_DB[:schema_test__schema_test].first.should == {:i=>1}
  615. POSTGRES_DB.from(Sequel.lit('schema_test.schema_test')).first.should == {:i=>1}
  616. POSTGRES_DB.drop_table(:schema_test__schema_test)
  617. POSTGRES_DB.create_table(Sequel.qualify(:schema_test, :schema_test)){integer :i}
  618. POSTGRES_DB[:schema_test__schema_test].first.should == nil
  619. POSTGRES_DB.from(Sequel.lit('schema_test.schema_test')).first.should == nil
  620. POSTGRES_DB.drop_table(Sequel.qualify(:schema_test, :schema_test))
  621. end
  622. specify "#tables should not include tables in a default non-public schema" do
  623. POSTGRES_DB.create_table(:schema_test__schema_test){integer :i}
  624. POSTGRES_DB.tables.should include(:schema_test)
  625. POSTGRES_DB.tables.should_not include(:pg_am)
  626. POSTGRES_DB.tables.should_not include(:domain_udt_usage)
  627. end
  628. specify "#tables should return tables in the schema provided by the :schema argument" do
  629. POSTGRES_DB.create_table(:schema_test__schema_test){integer :i}
  630. POSTGRES_DB.tables(:schema=>:schema_test).should == [:schema_test]
  631. end
  632. specify "#schema should not include columns from tables in a default non-public schema" do
  633. POSTGRES_DB.create_table(:schema_test__domains){integer :i}
  634. sch = POSTGRES_DB.schema(:domains)
  635. cs = sch.map{|x| x.first}
  636. cs.should include(:i)
  637. cs.should_not include(:data_type)
  638. end
  639. specify "#schema should only include columns from the table in the given :schema argument" do
  640. POSTGRES_DB.create_table!(:domains){integer :d}
  641. POSTGRES_DB.create_table(:schema_test__domains){integer :i}
  642. sch = POSTGRES_DB.schema(:domains, :schema=>:schema_test)
  643. cs = sch.map{|x| x.first}
  644. cs.should include(:i)
  645. cs.should_not include(:d)
  646. POSTGRES_DB.drop_table(:domains)
  647. end
  648. specify "#schema should raise an exception if columns from tables in two separate schema are returned" do
  649. POSTGRES_DB.create_table!(:public__domains){integer :d}
  650. POSTGRES_DB.create_table(:schema_test__domains){integer :i}
  651. begin
  652. proc{POSTGRES_DB.schema(:domains)}.should raise_error(Sequel::Error)
  653. POSTGRES_DB.schema(:public__domains).map{|x| x.first}.should == [:d]
  654. POSTGRES_DB.schema(:schema_test__domains).map{|x| x.first}.should == [:i]
  655. ensure
  656. POSTGRES_DB.drop_table?(:public__domains)
  657. end
  658. end
  659. specify "#table_exists? should see if the table is in a given schema" do
  660. POSTGRES_DB.create_table(:schema_test__schema_test){integer :i}
  661. POSTGRES_DB.table_exists?(:schema_test__schema_test).should == true
  662. end
  663. specify "should be able to get primary keys for tables in a given schema" do
  664. POSTGRES_DB.create_table(:schema_test__schema_test){primary_key :i}
  665. POSTGRES_DB.primary_key(:schema_test__schema_test).should == 'i'
  666. end
  667. specify "should be able to get serial sequences for tables in a given schema" do
  668. POSTGRES_DB.create_table(:schema_test__schema_test){primary_key :i}
  669. POSTGRES_DB.primary_key_sequence(:schema_test__schema_test).should == '"schema_test".schema_test_i_seq'
  670. end
  671. specify "should be able to get serial sequences for tables that have spaces in the name in a given schema" do
  672. POSTGRES_DB.create_table(:"schema_test__schema test"){primary_key :i}
  673. POSTGRES_DB.primary_key_sequence(:"schema_test__schema test").should == '"schema_test"."schema test_i_seq"'
  674. end
  675. specify "should be able to get custom sequences for tables in a given schema" do
  676. POSTGRES_DB << "CREATE SEQUENCE schema_test.kseq"
  677. POSTGRES_DB.create_table(:schema_test__schema_test){integer :j; primary_key :k, :type=>:integer, :default=>Sequel.lit("nextval('schema_test.kseq'::regclass)")}
  678. POSTGRES_DB.primary_key_sequence(:schema_test__schema_test).should == '"schema_test".kseq'
  679. end
  680. specify "should be able to get custom sequences for tables that have spaces in the name in a given schema" do
  681. POSTGRES_DB << "CREATE SEQUENCE schema_test.\"ks eq\""
  682. POSTGRES_DB.create_table(:"schema_test__schema test"){integer :j; primary_key :k, :type=>:integer, :default=>Sequel.lit("nextval('schema_test.\"ks eq\"'::regclass)")}
  683. POSTGRES_DB.primary_key_sequence(:"schema_test__schema test").should == '"schema_test"."ks eq"'
  684. end
  685. specify "#default_schema= should change the default schema used from public" do
  686. POSTGRES_DB.create_table(:schema_test__schema_test){primary_key :i}
  687. POSTGRES_DB.default_schema = :schema_test
  688. POSTGRES_DB.table_exists?(:schema_test).should == true
  689. POSTGRES_DB.tables.should == [:schema_test]
  690. POSTGRES_DB.primary_key(:schema_test__schema_test).should == 'i'
  691. POSTGRES_DB.primary_key_sequence(:schema_test__schema_test).should == '"schema_test".schema_test_i_seq'
  692. end
  693. end
  694. describe "Postgres::Database schema qualified tables and eager graphing" do
  695. before(:all) do
  696. @db = POSTGRES_DB
  697. @db.run "DROP SCHEMA s CASCADE" rescue nil
  698. @db.run "CREATE SCHEMA s"
  699. @db.create_table(:s__bands){primary_key :id; String :name}
  700. @db.create_table(:s__albums){primary_key :id; String :name; foreign_key :band_id, :s__bands}
  701. @db.create_table(:s__tracks){primary_key :id; String :name; foreign_key :album_id, :s__albums}
  702. @db.create_table(:s__members){primary_key :id; String :name; foreign_key :band_id, :s__bands}
  703. @Band = Class.new(Sequel::Model(:s__bands))
  704. @Album = Class.new(Sequel::Model(:s__albums))
  705. @Track = Class.new(Sequel::Model(:s__tracks))
  706. @Member = Class.new(Sequel::Model(:s__members))
  707. def @Band.name; :Band; end
  708. def @Album.name; :Album; end
  709. def @Track.name; :Track; end
  710. def @Member.name; :Member; end
  711. @Band.one_to_many :albums, :class=>@Album, :order=>:name
  712. @Band.one_to_many :members, :class=>@Member, :order=>:name
  713. @Album.many_to_one :band, :class=>@Band, :order=>:name
  714. @Album.one_to_many :tracks, :class=>@Track, :order=>:name
  715. @Track.many_to_one :album, :class=>@Album, :order=>:name
  716. @Member.many_to_one :band, :class=>@Band, :order=>:name
  717. @Member.many_to_many :members, :class=>@Member, :join_table=>:s__bands, :right_key=>:id, :left_key=>:id, :left_primary_key=>:band_id, :right_primary_key=>:band_id, :order=>:name
  718. @Band.many_to_many :tracks, :class=>@Track, :join_table=>:s__albums, :right_key=>:id, :right_primary_key=>:album_id, :order=>:name
  719. @b1 = @Band.create(:name=>"BM")
  720. @b2 = @Band.create(:name=>"J")
  721. @a1 = @Album.create(:name=>"BM1", :band=>@b1)
  722. @a2 = @Album.create(:name=>"BM2", :band=>@b1)
  723. @a3 = @Album.create(:name=>"GH", :band=>@b2)
  724. @a4 = @Album.create(:name=>"GHL", :band=>@b2)
  725. @t1 = @Track.create(:name=>"BM1-1", :album=>@a1)
  726. @t2 = @Track.create(:name=>"BM1-2", :album=>@a1)
  727. @t3 = @Track.create(:name=>"BM2-1", :album=>@a2)
  728. @t4 = @Track.create(:name=>"BM2-2", :album=>@a2)
  729. @m1 = @Member.create(:name=>"NU", :band=>@b1)
  730. @m2 = @Member.create(:name=>"TS", :band=>@b1)
  731. @m3 = @Member.create(:name=>"NS", :band=>@b2)
  732. @m4 = @Member.create(:name=>"JC", :band=>@b2)
  733. end
  734. after(:all) do
  735. @db.run "DROP SCHEMA s CASCADE"
  736. end
  737. specify "should return all eager graphs correctly" do
  738. bands = @Band.order(:bands__name).eager_graph(:albums).all
  739. bands.should == [@b1, @b2]
  740. bands.map{|x| x.albums}.should == [[@a1, @a2], [@a3, @a4]]
  741. bands = @Band.order(:bands__name).eager_graph(:albums=>:tracks).all
  742. bands.should == [@b1, @b2]
  743. bands.map{|x| x.albums}.should == [[@a1, @a2], [@a3, @a4]]
  744. bands.map{|x| x.albums.map{|y| y.tracks}}.should == [[[@t1, @t2], [@t3, @t4]], [[], []]]
  745. bands = @Band.order(:bands__name).eager_graph({:albums=>:tracks}, :members).all
  746. bands.should == [@b1, @b2]
  747. bands.map{|x| x.albums}.should == [[@a1, @a2], [@a3, @a4]]
  748. bands.map{|x| x.albums.map{|y| y.tracks}}.should == [[[@t1, @t2], [@t3, @t4]], [[], []]]
  749. bands.map{|x| x.members}.should == [[@m1, @m2], [@m4, @m3]]
  750. end
  751. specify "should have eager graphs work with previous joins" do
  752. bands = @Band.order(:bands__name).select_all(:s__bands).join(:s__members, :band_id=>:id).from_self(:alias=>:bands0).eager_graph(:albums=>:tracks).all
  753. bands.should == [@b1, @b2]
  754. bands.map{|x| x.albums}.should == [[@a1, @a2], [@a3, @a4]]
  755. bands.map{|x| x.albums.map{|y| y.tracks}}.should == [[[@t1, @t2], [@t3, @t4]], [[], []]]
  756. end
  757. specify "should have eager graphs work with joins with the same tables" do
  758. bands = @Band.order(:bands__name).select_all(:s__bands).join(:s__members, :band_id=>:id).eager_graph({:albums=>:tracks}, :members).all
  759. bands.should == [@b1, @b2]
  760. bands.map{|x| x.albums}.should == [[@a1, @a2], [@a3, @a4]]
  761. bands.map{|x| x.albums.map{|y| y.tracks}}.should == [[[@t1, @t2], [@t3, @t4]], [[], []]]
  762. bands.map{|x| x.members}.should == [[@m1, @m2], [@m4, @m3]]
  763. end
  764. specify "should have eager graphs work with self referential associations" do
  765. bands = @Band.order(:bands__name).eager_graph(:tracks=>{:album=>:band}).all
  766. bands.should == [@b1, @b2]
  767. bands.map{|x| x.tracks}.should == [[@t1, @t2, @t3, @t4], []]
  768. bands.map{|x| x.tracks.map{|y| y.album}}.should == [[@a1, @a1, @a2, @a2], []]
  769. bands.map{|x| x.tracks.map{|y| y.album.band}}.should == [[@b1, @b1, @b1, @b1], []]
  770. members = @Member.order(:members__name).eager_graph(:members).all
  771. members.should == [@m4, @m3, @m1, @m2]
  772. members.map{|x| x.members}.should == [[@m4, @m3], [@m4, @m3], [@m1, @m2], [@m1, @m2]]
  773. members = @Member.order(:members__name).eager_graph(:band, :members=>:band).all
  774. members.should == [@m4, @m3, @m1, @m2]
  775. members.map{|x| x.band}.should == [@b2, @b2, @b1, @b1]
  776. members.map{|x| x.members}.should == [[@m4, @m3], [@m4, @m3], [@m1, @m2], [@m1, @m2]]
  777. members.map{|x| x.members.map{|y| y.band}}.should == [[@b2, @b2], [@b2, @b2], [@b1, @b1], [@b1, @b1]]
  778. end
  779. specify "should have eager graphs work with a from_self dataset" do
  780. bands = @Band.order(:bands__name).from_self.eager_graph(:tracks=>{:album=>:band}).all
  781. bands.should == [@b1, @b2]
  782. bands.map{|x| x.tracks}.should == [[@t1, @t2, @t3, @t4], []]
  783. bands.map{|x| x.tracks.map{|y| y.album}}.should == [[@a1, @a1, @a2, @a2], []]
  784. bands.map{|x| x.tracks.map{|y| y.album.band}}.should == [[@b1, @b1, @b1, @b1], []]
  785. end
  786. specify "should have eager graphs work with different types of aliased from tables" do
  787. bands = @Band.order(:tracks__name).from(:s__bands___tracks).eager_graph(:tracks).all
  788. bands.should == [@b1, @b2]
  789. bands.map{|x| x.tracks}.should == [[@t1, @t2, @t3, @t4], []]
  790. bands = @Band.order(:tracks__name).from(Sequel.expr(:s__bands).as(:tracks)).eager_graph(:tracks).all
  791. bands.should == [@b1, @b2]
  792. bands.map{|x| x.tracks}.should == [[@t1, @t2, @t3, @t4], []]
  793. bands = @Band.order(:tracks__name).from(Sequel.expr(:s__bands).as(Sequel.identifier(:tracks))).eager_graph(:tracks).all
  794. bands.should == [@b1, @b2]
  795. bands.map{|x| x.tracks}.should == [[@t1, @t2, @t3, @t4], []]
  796. bands = @Band.order(:tracks__name).from(Sequel.expr(:s__bands).as('tracks')).eager_graph(:tracks).all
  797. bands.should == [@b1, @b2]
  798. bands.map{|x| x.tracks}.should == [[@t1, @t2, @t3, @t4], []]
  799. end
  800. specify "should have eager graphs work with join tables with aliases" do
  801. bands = @Band.order(:bands__name).eager_graph(:members).join(:s__albums___tracks, :band_id=>Sequel.qualify(:s__bands, :id)).eager_graph(:albums=>:tracks).all
  802. bands.should == [@b1, @b2]
  803. bands.map{|x| x.albums}.should == [[@a1, @a2], [@a3, @a4]]
  804. bands.map{|x| x.members}.should == [[@m1, @m2], [@m4, @m3]]
  805. bands = @Band.order(:bands__name).eager_graph(:members).join(Sequel.as(:s__albums, :tracks), :band_id=>Sequel.qualify(:s__bands, :id)).eager_graph(:albums=>:tracks).all
  806. bands.should == [@b1, @b2]
  807. bands.map{|x| x.albums}.should == [[@a1, @a2], [@a3, @a4]]
  808. bands.map{|x| x.members}.should == [[@m1, @m2], [@m4, @m3]]
  809. bands = @Band.order(:bands__name).eager_graph(:members).join(Sequel.as(:s__albums, 'tracks'), :band_id=>Sequel.qualify(:s__bands, :id)).eager_graph(:albums=>:tracks).all
  810. bands.should == [@b1, @b2]
  811. bands.map{|x| x.albums}.should == [[@a1, @a2], [@a3, @a4]]
  812. bands.map{|x| x.members}.should == [[@m1, @m2], [@m4, @m3]]
  813. bands = @Band.order(:bands__name).eager_graph(:members).join(Sequel.as(:s__albums, Sequel.identifier(:tracks)), :band_id=>Sequel.qualify(:s__bands, :id)).eager_graph(:albums=>:tracks).all
  814. bands.should == [@b1, @b2]
  815. bands.map{|x| x.albums}.should == [[@a1, @a2], [@a3, @a4]]
  816. bands.map{|x| x.members}.should == [[@m1, @m2], [@m4, @m3]]
  817. bands = @Band.order(:bands__name).eager_graph(:members).join(:s__albums, {:band_id=>Sequel.qualify(:s__bands, :id)}, :table_alias=>:tracks).eager_graph(:albums=>:tracks).all
  818. bands.should == [@b1, @b2]
  819. bands.map{|x| x.albums}.should == [[@a1, @a2], [@a3, @a4]]
  820. bands.map{|x| x.members}.should == [[@m1, @m2], [@m4, @m3]]
  821. bands = @Band.order(:bands__name).eager_graph(:members).join(:s__albums, {:band_id=>Sequel.qualify(:s__bands, :id)}, :table_alias=>'tracks').eager_graph(:albums=>:tracks).all
  822. bands.should == [@b1, @b2]
  823. bands.map{|x| x.albums}.should == [[@a1, @a2], [@a3, @a4]]
  824. bands.map{|x| x.members}.should == [[@m1, @m2], [@m4, @m3]]
  825. bands = @Band.order(:bands__name).eager_graph(:members).join(:s__albums, {:band_id=>Sequel.qualify(:s__bands, :id)}, :table_alias=>Sequel.identifier(:tracks)).eager_graph(:albums=>:tracks).all
  826. bands.should == [@b1, @b2]
  827. bands.map{|x| x.albums}.should == [[@a1, @a2], [@a3, @a4]]
  828. bands.map{|x| x.members}.should == [[@m1, @m2], [@m4, @m3]]
  829. end
  830. specify "should have eager graphs work with different types of qualified from tables" do
  831. bands = @Band.order(:bands__name).from(Sequel.qualify(:s, :bands)).eager_graph(:tracks).all
  832. bands.should == [@b1, @b2]
  833. bands.map{|x| x.tracks}.should == [[@t1, @t2, @t3, @t4], []]
  834. bands = @Band.order(:bands__name).from(Sequel.identifier(:bands).qualify(:s)).eager_graph(:tracks).all
  835. bands.should == [@b1, @b2]
  836. bands.map{|x| x.tracks}.should == [[@t1, @t2, @t3, @t4], []]
  837. bands = @Band.order(:bands__name).from(Sequel::SQL::QualifiedIdentifier.new(:s, 'bands')).eager_graph(:tracks).all
  838. bands.should == [@b1, @b2]
  839. bands.map{|x| x.tracks}.should == [[@t1, @t2, @t3, @t4], []]
  840. end
  841. end
  842. if POSTGRES_DB.server_version >= 80300
  843. POSTGRES_DB.create_table! :test6 do
  844. text :title
  845. text :body
  846. full_text_index [:title, :body]
  847. end
  848. describe "PostgreSQL tsearch2" do
  849. before do
  850. @ds = POSTGRES_DB[:test6]
  851. end
  852. after do
  853. POSTGRES_DB[:test6].delete
  854. end
  855. specify "should search by indexed column" do
  856. record = {:title => "oopsla conference", :body => "test"}
  857. @ds << record
  858. @ds.full_text_search(:title, "oopsla").all.should include(record)
  859. end
  860. specify "should join multiple coumns with spaces to search by last words in row" do
  861. record = {:title => "multiple words", :body => "are easy to search"}
  862. @ds << record
  863. @ds.full_text_search([:title, :body], "words").all.should include(record)
  864. end
  865. specify "should return rows with a NULL in one column if a match in another column" do
  866. record = {:title => "multiple words", :body =>nil}
  867. @ds << record
  868. @ds.full_text_search([:title, :body], "words").all.should include(record)
  869. end
  870. end
  871. end
  872. if POSTGRES_DB.dataset.supports_window_functions?
  873. describe "Postgres::Dataset named windows" do
  874. before do
  875. @db = POSTGRES_DB
  876. @db.create_table!(:i1){Integer :id; Integer :group_id; Integer :amount}
  877. @ds = @db[:i1].order(:id)
  878. @ds.insert(:id=>1, :group_id=>1, :amount=>1)
  879. @ds.insert(:id=>2, :group_id=>1, :amount=>10)
  880. @ds.insert(:id=>3, :group_id=>1, :amount=>100)
  881. @ds.insert(:id=>4, :group_id=>2, :amount=>1000)
  882. @ds.insert(:id=>5, :group_id=>2, :amount=>10000)
  883. @ds.insert(:id=>6, :group_id=>2, :amount=>100000)
  884. end
  885. after do
  886. @db.drop_table?(:i1)
  887. end
  888. specify "should give correct results for window functions" do
  889. @ds.window(:win, :partition=>:group_id, :order=>:id).select(:id){sum(:over, :args=>amount, :window=>win){}}.all.should ==
  890. [{:sum=>1, :id=>1}, {:sum=>11, :id=>2}, {:sum=>111, :id=>3}, {:sum=>1000, :id=>4}, {:sum=>11000, :id=>5}, {:sum=>111000, :id=>6}]
  891. @ds.window(:win, :partition=>:group_id).select(:id){sum(:over, :args=>amount, :window=>win, :order=>id){}}.all.should ==
  892. [{:sum=>1, :id=>1}, {:sum=>11, :id=>2}, {:sum=>111, :id=>3}, {:sum=>1000, :id=>4}, {:sum=>11000, :id=>5}, {:sum=>111000, :id=>6}]
  893. @ds.window(:win, {}).select(:id){sum(:over, :args=>amount, :window=>:win, :order=>id){}}.all.should ==
  894. [{:sum=>1, :id=>1}, {:sum=>11, :id=>2}, {:sum=>111, :id=>3}, {:sum=>1111, :id=>4}, {:sum=>11111, :id=>5}, {:sum=>111111, :id=>6}]
  895. @ds.window(:win, :partition=>:group_id).select(:id){sum(:over, :args=>amount, :window=>:win, :order=>id, :frame=>:all){}}.all.should ==
  896. [{:sum=>111, :id=>1}, {:sum=>111, :id=>2}, {:sum=>111, :id=>3}, {:sum=>111000, :id=>4}, {:sum=>111000, :id=>5}, {:sum=>111000, :id=>6}]
  897. end
  898. end
  899. end
  900. describe "Postgres::Database functions, languages, schemas, and triggers" do
  901. before do
  902. @d = POSTGRES_DB
  903. end
  904. after do
  905. @d.drop_function('tf', :if_exists=>true, :cascade=>true)
  906. @d.drop_function('tf', :if_exists=>true, :cascade=>true, :args=>%w'integer integer')
  907. @d.drop_language(:plpgsql, :if_exists=>true, :cascade=>true) if @d.server_version < 90000
  908. @d.drop_schema(:sequel, :if_exists=>true, :cascade=>true)
  909. @d.drop_table?(:test)
  910. end
  911. specify "#create_function and #drop_function should create and drop functions" do
  912. proc{@d['SELECT tf()'].all}.should raise_error(Sequel::DatabaseError)
  913. args = ['tf', 'SELECT 1', {:returns=>:integer}]
  914. @d.send(:create_function_sql, *args).should =~ /\A\s*CREATE FUNCTION tf\(\)\s+RETURNS integer\s+LANGUAGE SQL\s+AS 'SELECT 1'\s*\z/
  915. @d.create_function(*args)
  916. rows = @d['SELECT tf()'].all.should == [{:tf=>1}]
  917. @d.send(:drop_function_sql, 'tf').should == 'DROP FUNCTION tf()'
  918. @d.drop_function('tf')
  919. proc{@d['SELECT tf()'].all}.should raise_error(Sequel::DatabaseError)
  920. end
  921. specify "#create_function and #drop_function should support options" do
  922. args = ['tf', 'SELECT $1 + $2', {:args=>[[:integer, :a], :integer], :replace=>true, :returns=>:integer, :language=>'SQL', :behavior=>:immutable, :strict=>true, :security_definer=>true, :cost=>2, :set=>{:search_path => 'public'}}]
  923. @d.send(:create_function_sql,*args).should =~ /\A\s*CREATE OR REPLACE FUNCTION tf\(a integer, integer\)\s+RETURNS integer\s+LANGUAGE SQL\s+IMMUTABLE\s+STRICT\s+SECURITY DEFINER\s+COST 2\s+SET search_path = public\s+AS 'SELECT \$1 \+ \$2'\s*\z/
  924. @d.create_function(*args)
  925. # Make sure replace works
  926. @d.create_function(*args)
  927. rows = @d['SELECT tf(1, 2)'].all.should == [{:tf=>3}]
  928. args = ['tf', {:if_exists=>true, :cascade=>true, :args=>[[:integer, :a], :integer]}]
  929. @d.send(:drop_function_sql,*args).should == 'DROP FUNCTION IF EXISTS tf(a integer, integer) CASCADE'
  930. @d.drop_function(*args)
  931. # Make sure if exists works
  932. @d.drop_function(*args)
  933. end
  934. specify "#create_language and #drop_language should create and drop languages" do
  935. @d.send(:create_language_sql, :plpgsql).should == 'CREATE LANGUAGE plpgsql'
  936. @d.create_language(:plpgsql, :replace=>true) if @d.server_version < 90000
  937. proc{@d.create_language(:plpgsql)}.should raise_error(Sequel::DatabaseError)
  938. @d.send(:drop_language_sql, :plpgsql).should == 'DROP LANGUAGE plpgsql'
  939. @d.drop_language(:plpgsql) if @d.server_version < 90000
  940. proc{@d.drop_language(:plpgsql)}.should raise_error(Sequel::DatabaseError) if @d.server_version < 90000
  941. @d.send(:create_language_sql, :plpgsql, :replace=>true, :trusted=>true, :handler=>:a, :validator=>:b).should == (@d.server_version >= 90000 ? 'CREATE OR REPLACE TRUSTED LANGUAGE plpgsql HANDLER a VALIDATOR b' : 'CREATE TRUSTED LANGUAGE plpgsql HANDLER a VALIDATOR b')
  942. @d.send(:drop_language_sql, :plpgsql, :if_exists=>true, :cascade=>true).should == 'DROP LANGUAGE IF EXISTS plpgsql CASCADE'
  943. # Make sure if exists works
  944. @d.drop_language(:plpgsql, :if_exists=>true, :cascade=>true) if @d.server_version < 90000
  945. end
  946. specify "#create_schema and #drop_schema should create and drop schemas" do
  947. @d.send(:create_schema_sql, :sequel).should == 'CREATE SCHEMA "sequel"'
  948. @d.send(:drop_schema_sql, :sequel).should == 'DROP SCHEMA "sequel"'
  949. @d.send(:drop_schema_sql, :sequel, :if_exists=>true, :cascade=>true).should == 'DROP SCHEMA IF EXISTS "sequel" CASCADE'
  950. @d.create_schema(:sequel)
  951. @d.create_table(:sequel__test){Integer :a}
  952. @d.drop_schema(:sequel, :if_exists=>true, :cascade=>true)
  953. end
  954. specify "#create_trigger and #drop_trigger should create and drop triggers" do
  955. @d.create_language(:plpgsql) if @d.server_version < 90000
  956. @d.create_function(:tf, 'BEGIN IF NEW.value IS NULL THEN RAISE EXCEPTION \'Blah\'; END IF; RETURN NEW; END;', :language=>:plpgsql, :returns=>:trigger)
  957. @d.send(:create_trigger_sql, :test, :identity, :tf, :each_row=>true).should == 'CREATE TRIGGER identity BEFORE INSERT OR UPDATE OR DELETE ON "test" FOR EACH ROW EXECUTE PROCEDURE tf()'
  958. @d.create_table(:test){String :name; Integer :value}
  959. @d.create_trigger(:test, :identity, :tf, :each_row=>true)
  960. @d[:test].insert(:name=>'a', :value=>1)
  961. @d[:test].filter(:name=>'a').all.should == [{:name=>'a', :value=>1}]
  962. proc{@d[:test].filter(:name=>'a').update(:value=>nil)}.should raise_error(Sequel::DatabaseError)
  963. @d[:test].filter(:name=>'a').all.should == [{:name=>'a', :value=>1}]
  964. @d[:test].filter(:name=>'a').update(:value=>3)
  965. @d[:test].filter(:name=>'a').all.should == [{:name=>'a', :value=>3}]
  966. @d.send(:drop_trigger_sql, :test, :identity).should == 'DROP TRIGGER identity ON "test"'
  967. @d.drop_trigger(:test, :identity)
  968. @d.send(:create_trigger_sql, :test, :identity, :tf, :after=>true, :events=>:insert, :args=>[1, 'a']).should == 'CREATE TRIGGER identity AFTER INSERT ON "test" EXECUTE PROCEDURE tf(1, \'a\')'
  969. @d.send(:drop_trigger_sql, :test, :identity, :if_exists=>true, :cascade=>true).should == 'DROP TRIGGER IF EXISTS identity ON "test" CASCADE'
  970. # Make sure if exists works
  971. @d.drop_trigger(:test, :identity, :if_exists=>true, :cascade=>true)
  972. end
  973. end
  974. if PO

(Large file truncated here; the full file is available at the repository URL above.)