diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 3294d4a6d..382cae041 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,17 +1,11 @@
 name: CI
 
-on:
-  push:
-    branches: [ main ]
-  pull_request:
-    branches: [ main ]
-  schedule:
-    - cron: '0 4 * * 1'
+on: [push, pull_request]
 
 jobs:
   test:
     name: Run test suite
-    runs-on: ubuntu-20.04 # TODO: Change back to 'ubuntu-latest' when https://github.com/microsoft/mssql-docker/issues/899 resolved.
+    runs-on: ubuntu-latest
 
     env:
       COMPOSE_FILE: compose.ci.yaml
diff --git a/CHANGELOG.md b/CHANGELOG.md
index c74508b03..c0553615f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,34 @@
+## v8.0.6
+
+#### Fixed
+
+- [#1318](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/pull/1318) Reverse order of values when upserting
+- [#1321](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/pull/1321) Fix SQL statement to calculate `updated_at` when upserting
+
+## v8.0.5
+
+#### Added
+
+- [#1315](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/pull/1315) Add support for `insert_all` and `upsert_all`
+
+## v8.0.4
+
+#### Fixed
+
+- [#1308](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/pull/1308) Fix retrieval of temporary table's column information.
+
+## v8.0.3
+
+#### Fixed
+
+- [#1306](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/pull/1306) Fix affected rows count when lowercase schema reflection enabled
+
+## v8.0.2
+
+#### Fixed
+
+- [#1272](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/pull/1272) Fix parsing of raw table name from SQL with extra parentheses
+
 ## v8.0.1
 
 #### Fixed
diff --git a/README.md b/README.md
index 29e2ed32b..6d42d0a8f 100644
--- a/README.md
+++ b/README.md
@@ -168,6 +168,22 @@ ActiveRecord::ConnectionAdapters::SQLServerAdapter.showplan_option = 'SHOWPLAN_X
 ```
 
 **NOTE:** The method we utilize to make SHOWPLANs work is very brittle to complex SQL. There is no getting around this as we have to deconstruct an already prepared statement for the sp_executesql method. If you find that explain breaks your app, simple disable it. Do not open a github issue unless you have a patch. Please [consult the Rails guides](http://guides.rubyonrails.org/active_record_querying.html#running-explain) for more info.
 
+#### `insert_all` / `upsert_all` support
+
+On other database systems such as MySQL, SQLite, or PostgreSQL, `insert_all` and `upsert_all` use a clause on the `INSERT` statement to either skip duplicates (e.g. `ON CONFLICT DO NOTHING`) or update the existing record (e.g. `ON DUPLICATE KEY UPDATE`). Microsoft SQL Server does not offer these clauses, so support for these two options is implemented slightly differently.
+
+Behind the scenes, we execute a `MERGE` query, which joins the data you want to insert or update against the table that already exists on the server. The emphasis here is on "joining", so we also need to remove any duplicates that would make the `JOIN` operation fail, e.g. something like this:
+
+```ruby
+Book.insert_all [
+  { id: 200, author_id: 8, name: "Refactoring" },
+  { id: 200, author_id: 8, name: "Refactoring" }
+]
+```
+
+The removal of duplicates happens as part of the SQL query itself.
+
+Because of this implementation, if you pass `on_duplicate` to `upsert_all`, make sure to assign your value to `target.[column_name]` (e.g. `target.status = GREATEST(target.status, 1)`). To access the values that you want to upsert, use `source.[column_name]`.
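+
+As a concrete sketch (the `Book` model and its integer `status` column are only illustrative), the `GREATEST(target.status, 1)` assignment above can also be written with a standard `CASE` expression, which works on SQL Server versions that do not ship `GREATEST`:
+
+```ruby
+Book.upsert_all(
+  [{ id: 1, status: 1 }, { id: 2, status: 1 }],
+  on_duplicate: Arel.sql(<<~SQL)
+    target.status = CASE
+      WHEN target.status > 1 THEN target.status
+      ELSE 1
+    END
+  SQL
+)
+```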
 
 ## New Rails Applications
 
diff --git a/VERSION b/VERSION
index cd1d2e94f..68d92dd66 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-8.0.1
+8.0.6
diff --git a/lib/active_record/connection_adapters/sqlserver/database_statements.rb b/lib/active_record/connection_adapters/sqlserver/database_statements.rb
index 9e36d61aa..0473afb8e 100644
--- a/lib/active_record/connection_adapters/sqlserver/database_statements.rb
+++ b/lib/active_record/connection_adapters/sqlserver/database_statements.rb
@@ -39,7 +39,8 @@ def cast_result(raw_result)
         end
 
         def affected_rows(raw_result)
-          raw_result.first['AffectedRows']
+          column_name = lowercase_schema_reflection ? 'affectedrows' : 'AffectedRows'
+          raw_result.first[column_name]
         end
 
         def raw_execute(sql, name = nil, binds = [], prepare: false, async: false, allow_retry: false, materialize_transactions: true, batch: false)
@@ -142,18 +143,56 @@ def default_insert_value(column)
         end
         private :default_insert_value
 
         def build_insert_sql(insert) # :nodoc:
-          sql = +"INSERT #{insert.into}"
-
-          if returning = insert.send(:insert_all).returning
-            returning_sql = if returning.is_a?(String)
-              returning
-            else
-              Array(returning).map { |column| "INSERTED.#{quote_column_name(column)}" }.join(", ")
-            end
-            sql << " OUTPUT #{returning_sql}"
+          # Use regular insert if not skipping/updating duplicates.
+          return build_sql_for_regular_insert(insert:) unless insert.skip_duplicates? || insert.update_duplicates?
+
+          insert_all = insert.send(:insert_all)
+          columns_with_uniqueness_constraints = get_columns_with_uniqueness_constraints(insert_all:, insert:)
+
+          # If we do not have any columns that might have conflicting values just execute a regular insert, else use merge.
+          if columns_with_uniqueness_constraints.flatten.empty?
+            build_sql_for_regular_insert(insert:)
+          else
+            build_sql_for_merge_insert(insert:, insert_all:, columns_with_uniqueness_constraints:)
           end
+        end
+
+
+        def build_sql_for_merge_insert(insert:, insert_all:, columns_with_uniqueness_constraints:) # :nodoc:
+          insert_all.inserts.reverse! if insert.update_duplicates?
+
+          sql = <<~SQL
+            MERGE INTO #{insert.model.quoted_table_name} WITH (UPDLOCK, HOLDLOCK) AS target
+            USING (
+              SELECT *
+              FROM (
+                SELECT #{insert.send(:columns_list)}, #{partition_by_columns_with_uniqueness_constraints(columns_with_uniqueness_constraints:)}
+                FROM (#{insert.values_list})
+                AS t1 (#{insert.send(:columns_list)})
+              ) AS ranked_source
+              WHERE #{is_first_record_across_all_uniqueness_constraints(columns_with_uniqueness_constraints:)}
+            ) AS source
+            ON (#{joining_on_columns_with_uniqueness_constraints(columns_with_uniqueness_constraints:)})
+          SQL
+
+          if insert.update_duplicates?
+            sql << " WHEN MATCHED THEN UPDATE SET "
+
+            if insert.raw_update_sql?
+              sql << insert.raw_update_sql
+            else
+              if insert.record_timestamps?
+                sql << build_sql_for_recording_timestamps_when_updating(insert:)
+              end
+
+              sql << insert.updatable_columns.map { |column| "target.#{quote_column_name(column)}=source.#{quote_column_name(column)}" }.join(",")
+            end
+          end
+          sql << " WHEN NOT MATCHED BY TARGET THEN"
+          sql << " INSERT (#{insert.send(:columns_list)}) VALUES (#{insert_all.keys_including_timestamps.map { |column| "source.#{quote_column_name(column)}" }.join(", ")})"
+          sql << build_sql_for_returning(insert:, insert_all: insert.send(:insert_all))
+          sql << ";"
 
-          sql << " #{insert.values_list}"
           sql
         end
@@ -405,11 +444,18 @@ def query_requires_identity_insert?(sql)
           raw_table_name = get_raw_table_name(sql)
           id_column = identity_columns(raw_table_name).first
 
-          id_column && sql =~ /^\s*(INSERT|EXEC sp_executesql N'INSERT)[^(]+\([^)]*\b(#{id_column.name})\b,?[^)]*\)/i ? SQLServer::Utils.extract_identifiers(raw_table_name).quoted : false
+          if id_column && (
+            sql =~ /^\s*(INSERT|EXEC sp_executesql N'INSERT)[^(]+\([^)]*\b(#{id_column.name})\b,?[^)]*\)/i ||
+            sql =~ /^\s*MERGE INTO.+THEN INSERT \([^)]*\b(#{id_column.name})\b,?[^)]*\)/im
+          )
+            SQLServer::Utils.extract_identifiers(raw_table_name).quoted
+          else
+            false
+          end
         end
 
         def insert_sql?(sql)
-          !(sql =~ /\A\s*(INSERT|EXEC sp_executesql N'INSERT)/i).nil?
+          !(sql =~ /\A\s*(INSERT|EXEC sp_executesql N'INSERT|MERGE INTO.+THEN INSERT)/im).nil?
         end
 
         def identity_columns(table_name)
@@ -454,6 +500,96 @@ def internal_raw_execute(sql, raw_connection, perform_do: false)
           perform_do ? result.do : result
         end
 
+
+        # === SQLServer Specific (insert_all / upsert_all support) ===================== #
+        def build_sql_for_returning(insert:, insert_all:)
+          return "" unless insert_all.returning
+
+          returning_values_sql = if insert_all.returning.is_a?(String)
+            insert_all.returning
+          else
+            Array(insert_all.returning).map do |attribute|
+              if insert.model.attribute_alias?(attribute)
+                "INSERTED.#{quote_column_name(insert.model.attribute_alias(attribute))} AS #{quote_column_name(attribute)}"
+              else
+                "INSERTED.#{quote_column_name(attribute)}"
+              end
+            end.join(",")
+          end
+
+          " OUTPUT #{returning_values_sql}"
+        end
+        private :build_sql_for_returning
+
+        def get_columns_with_uniqueness_constraints(insert_all:, insert:)
+          if (unique_by = insert_all.unique_by)
+            [unique_by.columns]
+          else
+            # Compare against every unique constraint (primary key included).
+            # Discard constraints that are not fully included on insert.keys. Prevents invalid queries.
+            # Example: ignore unique index for columns ["name"] if insert keys is ["description"]
+            (insert_all.send(:unique_indexes).map(&:columns) + [insert_all.primary_keys]).select do |columns|
+              columns.to_set.subset?(insert.keys)
+            end
+          end
+        end
+        private :get_columns_with_uniqueness_constraints
+
+        def build_sql_for_regular_insert(insert:)
+          sql = "INSERT #{insert.into}"
+          sql << build_sql_for_returning(insert:, insert_all: insert.send(:insert_all))
+          sql << " #{insert.values_list}"
+          sql
+        end
+        private :build_sql_for_regular_insert
+
+        # why is the "PARTITION BY" clause needed?
+        # On other DBMSs, insert_all / upsert_all is implemented with an INSERT clause that lets you define what
+        # happens when duplicates are found (skip or update).
+        # By default, rows are considered unique by every unique index on the table.
+        # Since we have to use MERGE on SQL Server, which is effectively a JOIN, we have to perform the
+        # de-duplication ourselves; otherwise the JOIN would run into non-unique values and fail.
+        # The easiest way is PARTITION BY: we make sure that any record we are trying to insert is "the first one
+        # seen across all the potential columns with uniqueness constraints".
+        def partition_by_columns_with_uniqueness_constraints(columns_with_uniqueness_constraints:)
+          columns_with_uniqueness_constraints.map.with_index do |group_of_columns_with_uniqueness_constraints, index|
+            <<~PARTITION_BY
+              ROW_NUMBER() OVER (
+                PARTITION BY #{group_of_columns_with_uniqueness_constraints.map { |column| quote_column_name(column) }.join(",")}
+                ORDER BY #{group_of_columns_with_uniqueness_constraints.map { |column| "#{quote_column_name(column)} DESC" }.join(",")}
+              ) AS rn_#{index}
+            PARTITION_BY
+          end.join(", ")
+        end
+        private :partition_by_columns_with_uniqueness_constraints
+
+        def is_first_record_across_all_uniqueness_constraints(columns_with_uniqueness_constraints:)
+          columns_with_uniqueness_constraints.map.with_index do |group_of_columns_with_uniqueness_constraints, index|
+            "rn_#{index} = 1"
+          end.join(" AND ")
+        end
+        private :is_first_record_across_all_uniqueness_constraints
+
+        def joining_on_columns_with_uniqueness_constraints(columns_with_uniqueness_constraints:)
+          columns_with_uniqueness_constraints.map do |columns|
+            columns.map do |column|
+              "target.#{quote_column_name(column)} = source.#{quote_column_name(column)}"
+            end.join(" AND ")
+          end.join(") OR (")
+        end
+        private :joining_on_columns_with_uniqueness_constraints
+
+        # Normally, generating the CASE SQL is done entirely by Rails and you would just hook into
+        # "touch_model_timestamps_unless" to add your database-specific instructions. However, since we need the
+        # "target." prefix for the assignment, we generate the CASE switch ourselves.
+        def build_sql_for_recording_timestamps_when_updating(insert:)
+          insert.model.timestamp_attributes_for_update_in_model.filter_map do |column_name|
+            if insert.send(:touch_timestamp_attribute?, column_name)
+              "target.#{quote_column_name(column_name)}=CASE WHEN (#{insert.updatable_columns.map { |column| "(source.#{quote_column_name(column)} = target.#{quote_column_name(column)} OR (source.#{quote_column_name(column)} IS NULL AND target.#{quote_column_name(column)} IS NULL))" }.join(" AND ")}) THEN target.#{quote_column_name(column_name)} ELSE #{high_precision_current_timestamp} END,"
+            end
+          end.join
+        end
+        private :build_sql_for_recording_timestamps_when_updating
       end
     end
   end
diff --git a/lib/active_record/connection_adapters/sqlserver/schema_statements.rb b/lib/active_record/connection_adapters/sqlserver/schema_statements.rb
index 1cc1a1b67..207070f37 100644
--- a/lib/active_record/connection_adapters/sqlserver/schema_statements.rb
+++ b/lib/active_record/connection_adapters/sqlserver/schema_statements.rb
@@ -592,12 +592,22 @@ def column_type(ci:)
         end
 
         def column_definitions_sql(database, identifier)
-          object_name = prepared_statements ? "@0" : quote(identifier.object)
-          schema_name = if identifier.schema.blank?
-                          "schema_name()"
-                        else
-                          prepared_statements ? "@1" : quote(identifier.schema)
-                        end
+          schema_name = "schema_name()"
+
+          if prepared_statements
+            object_name = "@0"
+            schema_name = "@1" if identifier.schema.present?
+          else
+            object_name = quote(identifier.object)
+            schema_name = quote(identifier.schema) if identifier.schema.present?
+          end
+
+          object_id_arg = identifier.schema.present? ? "CONCAT(#{schema_name},'.',#{object_name})" : object_name
+
+          if identifier.temporary_table?
+            database = "TEMPDB"
+            object_id_arg = "CONCAT('#{database}','..',#{object_name})"
+          end
 
           %{
             SELECT
@@ -652,7 +662,7 @@ def column_definitions_sql(database, identifier)
               AND k.unique_index_id = ic.index_id
               AND c.column_id = ic.column_id
             WHERE
-              o.name = #{object_name}
+              o.Object_ID = Object_ID(#{object_id_arg})
               AND s.name = #{schema_name}
             ORDER BY
               c.column_id
@@ -674,7 +684,7 @@ def remove_check_constraints(table_name, column_name)
         end
 
         def remove_default_constraint(table_name, column_name)
-          # If their are foreign keys in this table, we could still get back a 2D array, so flatten just in case.
+          # If there are foreign keys in this table, we could still get back a 2D array, so flatten just in case.
           execute_procedure(:sp_helpconstraint, table_name, "nomsg").flatten.select do |row|
             row["constraint_type"] == "DEFAULT on column #{column_name}"
          end.each do |row|
@@ -710,8 +720,10 @@ def get_raw_table_name(sql)
              .match(/\s*([^(]*)/i)[0]
          elsif s.match?(/^\s*UPDATE\s+.*/i)
            s.match(/UPDATE\s+([^\(\s]+)\s*/i)[1]
+          elsif s.match?(/^\s*MERGE INTO.*/i)
+            s.match(/^\s*MERGE\s+INTO\s+(\[?[a-z_ -]+\]?\.?\[?[a-z_ -]+\]?)\s+(AS|WITH|USING)/i)[1]
          else
-            s.match(/FROM\s+((\[[^\(\]]+\])|[^\(\s]+)\s*/i)[1]
+            s.match(/FROM[\s|\(]+((\[[^\(\]]+\])|[^\(\s]+)\s*/i)[1]
          end.strip
        end
diff --git a/lib/active_record/connection_adapters/sqlserver/utils.rb b/lib/active_record/connection_adapters/sqlserver/utils.rb
index 002847919..5ebfeeb07 100644
--- a/lib/active_record/connection_adapters/sqlserver/utils.rb
+++ b/lib/active_record/connection_adapters/sqlserver/utils.rb
@@ -81,6 +81,10 @@ def hash
         parts.hash
       end
 
+      def temporary_table?
+        object.start_with?("#")
+      end
+
      protected
 
      def parse_raw_name
diff --git a/lib/active_record/connection_adapters/sqlserver_adapter.rb b/lib/active_record/connection_adapters/sqlserver_adapter.rb
index 314eacce1..3b3fd2a6d 100644
--- a/lib/active_record/connection_adapters/sqlserver_adapter.rb
+++ b/lib/active_record/connection_adapters/sqlserver_adapter.rb
@@ -3,6 +3,7 @@
 require "tiny_tds"
 require "base64"
 require "active_record"
+require "active_record/connection_adapters/statement_pool"
 require "arel_sqlserver"
 require "active_record/connection_adapters/sqlserver/core_ext/active_record"
 require "active_record/connection_adapters/sqlserver/core_ext/explain"
@@ -211,11 +212,11 @@ def supports_insert_returning?
       end
 
       def supports_insert_on_duplicate_skip?
-        false
+        true
       end
 
       def supports_insert_on_duplicate_update?
-        false
+        true
       end
 
       def supports_insert_conflict_target?
diff --git a/test/cases/adapter_test_sqlserver.rb b/test/cases/adapter_test_sqlserver.rb
index 96cc70bf0..a225f0349 100644
--- a/test/cases/adapter_test_sqlserver.rb
+++ b/test/cases/adapter_test_sqlserver.rb
@@ -7,11 +7,13 @@
 require "models/subscriber"
 require "models/minimalistic"
 require "models/college"
+require "models/discount"
 
 class AdapterTestSQLServer < ActiveRecord::TestCase
   fixtures :tasks
 
   let(:basic_insert_sql) { "INSERT INTO [funny_jokes] ([name]) VALUES('Knock knock')" }
+  let(:basic_merge_sql) { "MERGE INTO [ships] WITH (UPDLOCK, HOLDLOCK) AS target USING ( SELECT * FROM ( SELECT [id], [name], ROW_NUMBER() OVER ( PARTITION BY [id] ORDER BY [id] DESC ) AS rn_0 FROM ( VALUES (101, N'RSS Sir David Attenborough') ) AS t1 ([id], [name]) ) AS ranked_source WHERE rn_0 = 1 ) AS source ON (target.[id] = source.[id]) WHEN MATCHED THEN UPDATE SET target.[name] = source.[name]" }
   let(:basic_update_sql) { "UPDATE [customers] SET [address_street] = NULL WHERE [id] = 2" }
   let(:basic_select_sql) { "SELECT * FROM [customers] WHERE ([customers].[id] = 1)" }
 
@@ -92,6 +94,7 @@ class AdapterTestSQLServer < ActiveRecord::TestCase
   it "return unquoted table name object from basic INSERT UPDATE and SELECT statements" do
     assert_equal "funny_jokes", connection.send(:get_table_name, basic_insert_sql)
+    assert_equal "ships", connection.send(:get_table_name, basic_merge_sql)
     assert_equal "customers", connection.send(:get_table_name, basic_update_sql)
     assert_equal "customers", connection.send(:get_table_name, basic_select_sql)
   end
 
@@ -183,6 +186,24 @@ class AdapterTestSQLServer < ActiveRecord::TestCase
       assert_equal "Favorite number?", SSTestUppered.last.column1
       assert SSTestUppered.columns_hash["column2"]
     end
+
+    it "destroys model with no associations" do
+      connection.lowercase_schema_reflection = true
+
+      assert_nothing_raised do
+        discount = Discount.create!
+        discount.destroy!
+      end
+    end
+
+    it "destroys model with association" do
+      connection.lowercase_schema_reflection = true
+
+      assert_nothing_raised do
+        post = Post.create!(title: 'Setup', body: 'Record to be deleted')
+        post.destroy!
+      end
+    end
   end
 
   describe "identity inserts" do
@@ -194,6 +215,10 @@ class AdapterTestSQLServer < ActiveRecord::TestCase
       @identity_insert_sql_unquoted_sp = "EXEC sp_executesql N'INSERT INTO funny_jokes (id, name) VALUES (@0, @1)', N'@0 int, @1 nvarchar(255)', @0 = 420, @1 = N'Knock knock'"
       @identity_insert_sql_unordered_sp = "EXEC sp_executesql N'INSERT INTO [funny_jokes] ([name],[id]) VALUES (@0, @1)', N'@0 nvarchar(255), @1 int', @0 = N'Knock knock', @1 = 420"
 
+      @identity_merge_sql = "MERGE INTO [ships] WITH (UPDLOCK, HOLDLOCK) AS target USING ( SELECT * FROM ( SELECT [id], [name], ROW_NUMBER() OVER ( PARTITION BY [id] ORDER BY [id] DESC ) AS rn_0 FROM ( VALUES (101, N'RSS Sir David Attenborough') ) AS t1 ([id], [name]) ) AS ranked_source WHERE rn_0 = 1 ) AS source ON (target.[id] = source.[id]) WHEN MATCHED THEN UPDATE SET target.[name] = source.[name] WHEN NOT MATCHED BY TARGET THEN INSERT ([id], [name]) VALUES (source.[id], source.[name]) OUTPUT INSERTED.[id]"
+      @identity_merge_sql_unquoted = "MERGE INTO ships WITH (UPDLOCK, HOLDLOCK) AS target USING ( SELECT * FROM ( SELECT id, name, ROW_NUMBER() OVER ( PARTITION BY id ORDER BY id DESC ) AS rn_0 FROM ( VALUES (101, N'RSS Sir David Attenborough') ) AS t1 (id, name) ) AS ranked_source WHERE rn_0 = 1 ) AS source ON (target.id = source.id) WHEN MATCHED THEN UPDATE SET target.name = source.name WHEN NOT MATCHED BY TARGET THEN INSERT (id, name) VALUES (source.id, source.name) OUTPUT INSERTED.id"
+      @identity_merge_sql_unordered = "MERGE INTO [ships] WITH (UPDLOCK, HOLDLOCK) AS target USING ( SELECT * FROM ( SELECT [name], [id], ROW_NUMBER() OVER ( PARTITION BY [id] ORDER BY [id] DESC ) AS rn_0 FROM ( VALUES (101, N'RSS Sir David Attenborough') ) AS t1 ([name], [id]) ) AS ranked_source WHERE rn_0 = 1 ) AS source ON (target.[id] = source.[id]) WHEN MATCHED THEN UPDATE SET target.[name] = source.[name] WHEN NOT MATCHED BY TARGET THEN INSERT ([name], [id]) VALUES (source.[name], source.[id]) OUTPUT INSERTED.[id]"
+
       @identity_insert_sql_non_dbo = "INSERT INTO [test].[aliens] ([id],[name]) VALUES(420,'Mork')"
       @identity_insert_sql_non_dbo_unquoted = "INSERT INTO test.aliens ([id],[name]) VALUES(420,'Mork')"
       @identity_insert_sql_non_dbo_unordered = "INSERT INTO [test].[aliens] ([name],[id]) VALUES('Mork',420)"
@@ -210,6 +235,10 @@ class AdapterTestSQLServer < ActiveRecord::TestCase
       assert_equal "[funny_jokes]", connection.send(:query_requires_identity_insert?, @identity_insert_sql_unquoted_sp)
       assert_equal "[funny_jokes]", connection.send(:query_requires_identity_insert?, @identity_insert_sql_unordered_sp)
 
+      assert_equal "[ships]", connection.send(:query_requires_identity_insert?, @identity_merge_sql)
+      assert_equal "[ships]", connection.send(:query_requires_identity_insert?, @identity_merge_sql_unquoted)
+      assert_equal "[ships]", connection.send(:query_requires_identity_insert?, @identity_merge_sql_unordered)
+
       assert_equal "[test].[aliens]", connection.send(:query_requires_identity_insert?, @identity_insert_sql_non_dbo)
       assert_equal "[test].[aliens]", connection.send(:query_requires_identity_insert?, @identity_insert_sql_non_dbo_unquoted)
       assert_equal "[test].[aliens]", connection.send(:query_requires_identity_insert?, @identity_insert_sql_non_dbo_unordered)
@@ -219,7 +248,7 @@ class AdapterTestSQLServer < ActiveRecord::TestCase
     end
 
     it "return false to #query_requires_identity_insert? for normal SQL" do
-      [basic_insert_sql, basic_update_sql, basic_select_sql].each do |sql|
+      [basic_insert_sql, basic_merge_sql, basic_update_sql, basic_select_sql].each do |sql|
         assert !connection.send(:query_requires_identity_insert?, sql), "SQL was #{sql}"
       end
     end
diff --git a/test/cases/coerced_tests.rb b/test/cases/coerced_tests.rb
index 1ffa0694c..ba07f96c0 100644
--- a/test/cases/coerced_tests.rb
+++ b/test/cases/coerced_tests.rb
@@ -376,6 +376,22 @@ def test_payload_row_count_on_raw_sql_coerced
 end
 
 class CalculationsTest < ActiveRecord::TestCase
+  # SELECT columns must be in the GROUP clause.
+  coerce_tests! :test_should_count_with_group_by_qualified_name_on_loaded
+  def test_should_count_with_group_by_qualified_name_on_loaded_coerced
+    accounts = Account.group("accounts.id").select("accounts.id")
+
+    expected = {1 => 1, 2 => 1, 3 => 1, 4 => 1, 5 => 1, 6 => 1}
+
+    assert_not_predicate accounts, :loaded?
+    assert_equal expected, accounts.count
+
+    accounts.load
+
+    assert_predicate accounts, :loaded?
+    assert_equal expected, accounts.count(:id)
+  end
+
   # Fix randomly failing test. The loading of the model's schema was affecting the test.
   coerce_tests! :test_offset_is_kept
   def test_offset_is_kept_coerced
@@ -1183,6 +1199,9 @@ def test_add_on_delete_restrict_foreign_key_coerced
     end
   end
 
+  # SQL Server does not support 'restrict' for 'on_update' or 'on_delete'.
+  coerce_tests! :test_remove_foreign_key_with_restrict_action
+
   # Error message depends on the database adapter.
   coerce_tests! :test_add_foreign_key_with_if_not_exists_not_set
   def test_add_foreign_key_with_if_not_exists_not_set_coerced
@@ -2197,35 +2216,6 @@ class EnumTest < ActiveRecord::TestCase
   end
 end
 
-require "models/task"
-class QueryCacheExpiryTest < ActiveRecord::TestCase
-  # SQL Server does not support skipping or upserting duplicates.
-  coerce_tests! :test_insert_all
-  def test_insert_all_coerced
-    assert_raises(ArgumentError, /does not support skipping duplicates/) do
-      Task.cache { Task.insert({ starting: Time.now }) }
-    end
-
-    assert_raises(ArgumentError, /does not support upsert/) do
-      Task.cache { Task.upsert({ starting: Time.now }) }
-    end
-
-    assert_raises(ArgumentError, /does not support upsert/) do
-      Task.cache { Task.upsert_all([{ starting: Time.now }]) }
-    end
-
-    Task.cache do
-      assert_called(ActiveRecord::Base.connection_pool.query_cache, :clear, times: 1) do
-        Task.insert_all!([ starting: Time.now ])
-      end
-
-      assert_called(ActiveRecord::Base.connection_pool.query_cache, :clear, times: 1) do
-        Task.insert!({ starting: Time.now })
-      end
-    end
-  end
-end
-
 require "models/citation"
 class EagerLoadingTooManyIdsTest < ActiveRecord::TestCase
   fixtures :citations
@@ -2504,6 +2494,24 @@ def test_insert_with_type_casting_and_serialize_is_consistent_coerced
     Book.where(author_id: nil, name: '["Array"]').delete_all
     Book.lease_connection.add_index(:books, [:author_id, :name], unique: true)
   end
+
+  # Same as original but using target.status for assignment and CASE instead of GREATEST for operator
+  coerce_tests! :test_upsert_all_updates_using_provided_sql
+  def test_upsert_all_updates_using_provided_sql_coerced
+    Book.upsert_all(
+      [{id: 1, status: 1}, {id: 2, status: 1}],
+      on_duplicate: Arel.sql(<<~SQL
+        target.status = CASE
+          WHEN target.status > 1 THEN target.status
+          ELSE 1
+        END
+      SQL
+      )
+    )
+
+    assert_equal "published", Book.find(1).status
+    assert_equal "written", Book.find(2).status
+  end
 end
 
 module ActiveRecord
@@ -2521,21 +2529,6 @@ def invalid_add_column_option_exception_message(key)
   end
 end
 
-# SQL Server does not support upsert. Removed dependency on `insert_all` that uses upsert.
-class ActiveRecord::Encryption::ConcurrencyTest < ActiveRecord::EncryptionTestCase
-  undef_method :thread_encrypting_and_decrypting
-  def thread_encrypting_and_decrypting(thread_label)
-    posts = 100.times.collect { |index| EncryptedPost.create! title: "Article #{index} (#{thread_label})", body: "Body #{index} (#{thread_label})" }
-
-    Thread.new do
-      posts.each.with_index do |article, index|
-        assert_encrypted_attribute article, :title, "Article #{index} (#{thread_label})"
-        article.decrypt
-        assert_not_encrypted_attribute article, :title, "Article #{index} (#{thread_label})"
-      end
-    end
-  end
-end
-
 # Need to use `install_unregistered_type_fallback` instead of `install_unregistered_type_error` so that message-pack
 # can read and write `ActiveRecord::ConnectionAdapters::SQLServer::Type::Data` objects.
diff --git a/test/cases/insert_all_test_sqlserver.rb b/test/cases/insert_all_test_sqlserver.rb
new file mode 100644
index 000000000..6367d57d4
--- /dev/null
+++ b/test/cases/insert_all_test_sqlserver.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require "cases/helper_sqlserver"
+require "models/book"
+require "models/sqlserver/recurring_task"
+
+class InsertAllTestSQLServer < ActiveRecord::TestCase
+  # Test ported from the Rails `main` branch that is not on the `8-0-stable` branch.
+  def test_insert_all_only_applies_last_value_when_given_duplicate_identifiers
+    skip unless supports_insert_on_duplicate_skip?
+
+    Book.insert_all [
+      { id: 111, name: "expected_new_name" },
+      { id: 111, name: "unexpected_new_name" }
+    ]
+    assert_equal "expected_new_name", Book.find(111).name
+  end
+
+  # Test ported from the Rails `main` branch that is not on the `8-0-stable` branch.
+  def test_upsert_all_only_applies_last_value_when_given_duplicate_identifiers
+    skip unless supports_insert_on_duplicate_update? && !current_adapter?(:PostgreSQLAdapter)
+
+    Book.create!(id: 112, name: "original_name")
+
+    Book.upsert_all [
+      { id: 112, name: "unexpected_new_name" },
+      { id: 112, name: "expected_new_name" }
+    ]
+    assert_equal "expected_new_name", Book.find(112).name
+  end
+
+  test "upsert_all recording of timestamps works with mixed datatypes" do
+    task = RecurringTask.create!(
+      key: "abcdef",
+      priority: 5
+    )
+
+    RecurringTask.upsert_all([{
+      id: task.id,
+      priority: nil
+    }])
+
+    assert_not_equal task.updated_at, RecurringTask.find(task.id).updated_at
+  end
+end
diff --git a/test/cases/schema_test_sqlserver.rb b/test/cases/schema_test_sqlserver.rb
index f9dbde8e6..3d61a9604 100644
--- a/test/cases/schema_test_sqlserver.rb
+++ b/test/cases/schema_test_sqlserver.rb
@@ -101,5 +101,29 @@ class SchemaTestSQLServer < ActiveRecord::TestCase
       assert_equal "[with].[select notation]", connection.send(:get_raw_table_name, "INSERT INTO [with].[select notation] SELECT * FROM [table_name]")
     end
   end
+
+  describe "MERGE statements" do
+    it do
+      assert_equal "[dashboards]", connection.send(:get_raw_table_name, "MERGE INTO [dashboards] AS target")
+    end
+
+    it do
+      assert_equal "lock_without_defaults", connection.send(:get_raw_table_name, "MERGE INTO lock_without_defaults AS target")
+    end
+
+    it do
+      assert_equal "[WITH - SPACES]", connection.send(:get_raw_table_name, "MERGE INTO [WITH - SPACES] AS target")
+    end
+
+    it do
+      assert_equal "[with].[select notation]", connection.send(:get_raw_table_name, "MERGE INTO [with].[select notation] AS target")
+    end
+  end
+
+  describe 'CREATE VIEW statements' do
+    it do
+      assert_equal "test_table_as", connection.send(:get_raw_table_name, "CREATE VIEW test_views ( test_table_a_id, test_table_b_id ) AS SELECT test_table_as.id as test_table_a_id, test_table_bs.id as test_table_b_id FROM (test_table_as with(nolock) LEFT JOIN test_table_bs with(nolock) ON (test_table_as.id = test_table_bs.test_table_a_id))")
+    end
+  end
 end
diff --git a/test/cases/temporary_table_test_sqlserver.rb b/test/cases/temporary_table_test_sqlserver.rb
new file mode 100644
index 000000000..0ab808a70
--- /dev/null
+++ b/test/cases/temporary_table_test_sqlserver.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require "cases/helper_sqlserver"
+
+class TemporaryTableSQLServer < ActiveRecord::TestCase
+  def test_insert_into_temporary_table
+    ActiveRecord::Base.with_connection do |conn|
+      conn.exec_query("CREATE TABLE #temp_users (id INT IDENTITY(1,1), name NVARCHAR(100))")
+
+      result = conn.exec_query("SELECT * FROM #temp_users")
+      assert_equal 0, result.count
+
+      conn.exec_query("INSERT INTO #temp_users (name) VALUES ('John'), ('Doe')")
+
+      result = conn.exec_query("SELECT * FROM #temp_users")
+      assert_equal 2, result.count
+    end
+  end
+end
diff --git a/test/models/sqlserver/recurring_task.rb b/test/models/sqlserver/recurring_task.rb
new file mode 100644
index 000000000..aecf7c462
--- /dev/null
+++ b/test/models/sqlserver/recurring_task.rb
@@ -0,0 +1,3 @@
+class RecurringTask < ActiveRecord::Base
+  self.table_name = "recurring_tasks"
+end
diff --git a/test/schema/sqlserver_specific_schema.rb b/test/schema/sqlserver_specific_schema.rb
index a5160f791..f4b600d64 100644
--- a/test/schema/sqlserver_specific_schema.rb
+++ b/test/schema/sqlserver_specific_schema.rb
@@ -360,4 +360,12 @@
     name varchar(255)
   )
   TABLE_IN_OTHER_SCHEMA_USED_BY_MODEL
+
+  create_table "recurring_tasks", force: true do |t|
+    t.string :key
+    t.integer :priority, default: 0
+
+    t.datetime2 :created_at
+    t.datetime2 :updated_at
+  end
 end