diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 000000000..6d4c1a2af --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,30 @@ +# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.191.1/containers/ruby/.devcontainer/base.Dockerfile + +# [Choice] Ruby version: 3, 3.0, 2, 2.7, 2.6 +ARG VARIANT="3" +FROM mcr.microsoft.com/devcontainers/ruby:${VARIANT} + +# TinyTDS +RUN apt-get -y install libc6-dev \ + && wget http://www.freetds.org/files/stable/freetds-1.4.14.tar.gz \ + && tar -xzf freetds-1.4.14.tar.gz \ + && cd freetds-1.4.14 \ + && ./configure --prefix=/usr/local --with-tdsver=7.3 \ + && make \ + && make install + +# Install the SQL Server command-line tools +RUN curl https://packages.microsoft.com/keys/microsoft.asc | sudo tee /etc/apt/trusted.gpg.d/microsoft.asc \ + && curl https://packages.microsoft.com/config/ubuntu/22.04/prod.list | sudo tee /etc/apt/sources.list.d/mssql-release.list \ + && apt-get update \ + && ACCEPT_EULA=Y apt-get install -y mssql-tools18 unixodbc-dev \ + && echo 'export PATH="$PATH:/opt/mssql-tools18/bin"' >> ~/.bashrc \ + && echo 'export PATH="$PATH:/opt/mssql-tools18/bin"' >> /root/.bashrc + +# Add the SQL Server main Gemfile and install the gems. +RUN mkdir -p /tmp/activerecord-sqlserver-adapter +COPY Gemfile VERSION activerecord-sqlserver-adapter.gemspec /tmp/activerecord-sqlserver-adapter/ +RUN cd /tmp/activerecord-sqlserver-adapter \ + && RAILS_BRANCH=main bundle install \ + && rm -rf /tmp/activerecord-sqlserver-adapter +RUN chown -R vscode:vscode /usr/local/rvm diff --git a/.devcontainer/boot.sh b/.devcontainer/boot.sh new file mode 100755 index 000000000..83e81f691 --- /dev/null +++ b/.devcontainer/boot.sh @@ -0,0 +1,22 @@ +sudo chown -R vscode:vscode /usr/local/bundle + +# Wait for 10 seconds to make sure SQL Server came up. +sleep 10 + +# Setup test databases and users. +/opt/mssql-tools18/bin/sqlcmd -C -S sqlserver -U sa -P "MSSQLadmin!" < + +## Expected behavior + + +## Actual behavior + + +## How to reproduce + + +## Details + +- **Rails version**: `x.x.x` +- **SQL Server adapter version**: `x.x.x` +- **TinyTDS version**: `x.x.x` +- **FreeTDS details**: + + ``` + run `tsql -C` and paste here the output. 
+ ``` + diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 000000000..36f6f9aa4 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,60 @@ +name: CI + +on: + push: + branches: [ main ] + pull_request: + branches: [ main ] + schedule: + - cron: '0 4 * * 0' + +jobs: + test: + name: Run test suite + runs-on: ubuntu-latest + timeout-minutes: 10 + + env: + COMPOSE_FILE: compose.ci.yaml + + strategy: + fail-fast: false + matrix: + ruby: + - 3.4.7 + - 3.3.9 + - 3.2.9 + + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Build docker images + run: docker compose build --build-arg TARGET_VERSION=${{ matrix.ruby }} + + - name: Run tests + run: docker compose run ci + + + standardrb: + name: Code linting and formatting + runs-on: ubuntu-latest + + env: + COMPOSE_FILE: compose.ci.yaml + + strategy: + fail-fast: false + matrix: + ruby: + - 3.4.1 + + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Build docker images + run: docker compose build --build-arg TARGET_VERSION=${{ matrix.ruby }} + + - name: Run standardrb + run: docker compose run standardrb diff --git a/.gitignore b/.gitignore index 1ce5ead37..6365e37d2 100644 --- a/.gitignore +++ b/.gitignore @@ -1,11 +1,20 @@ nbproject/ -debug.log +debug.log* .DS_Store pkg/ doc/ +db/ *.gem .bundle Gemfile.lock test/profile/output/* .rvmrc .rbenv-version +.tool-versions +.idea +coverage/* +.flooignore +.floo +.byebug_history +tmp/* +test/storage/test.sqlite3* diff --git a/.rubocop.yml b/.rubocop.yml new file mode 100644 index 000000000..048b917d8 --- /dev/null +++ b/.rubocop.yml @@ -0,0 +1,29 @@ +AllCops: + TargetRubyVersion: 2.5 + +Layout/LineLength: + Max: 120 + +Naming/AccessorMethodName: + Enabled: false + +Naming/FileName: + Exclude: + - lib/activerecord-sqlserver-adapter.rb + +Naming/MethodName: + IgnoredPatterns: + - visit_.* + - primary_Key_From_Table + - table_From_Statement + - distinct_One_As_One_Is_So_Not_Fetch + - make_Fetch_Possible_And_Deterministic + +Naming/MethodParameterName: + Enabled: false + +Naming/PredicateName: + Enabled: false + +Style/StringLiterals: + EnforcedStyle: double_quotes diff --git a/CHANGELOG b/CHANGELOG deleted file mode 100644 index 45f7cfcf8..000000000 --- a/CHANGELOG +++ /dev/null @@ -1,574 +0,0 @@ - -* 3.2.10 * - -* Remove connection defaults for host/username/password. Since we want to suppoert Windows Authentication - and there are just to many possibilities. So we now have to be explicit. -* Remove really old TinyTDS warning. - - -* 3.2.9 * - -* The #remove_default_constraint uses #execute_procedure now. Fixes #223. Thanks @gicappa and @clintmiller. -* Mimic other adapters quoting for empty strings passed to integer columns. Fixes #164. -* Allow named parameters in stored procedures. Fixes #216. Thanks @sixfeetover. -* Make sure exclude [__rnt] table names form relation reflection. Fixes #219 and #221. Thanks @sphogan. - - -* 3.2.8 * - -* Include VERSION in gemspec's files. - - -* 3.2.7 * - -* Find VERSION in base file out of module namespace. Fixes #208 -* Better support for explain without sp_execute args. FIxes #207 - - -* 3.2.6 * - -* Unique has_many associations with pagination now work. Fixes #209 - - -* 3.2.5 * - -* Fix a few test from ActiveRecord 3.2.6 upgrade. - -* Fix db_name usage bug in #column_definitions [Altonymous] - - -* 3.2.4 * - -* Fixed schema reflection for identity columns using ODBC. Fixes #193. - - -* 3.2.3 * - -* Fixed datetime quoting for ActiveSupport::TimeWithZone objects. Fixes #187 and #189. 
- - -* 3.2.2 * - -* Fixes all known issues with cross database schema reflection. Fixes #185 [Chris Altman] - -* Fix exists? with offset by patching visitor. Fixes #171 and Fixes #167 - -* Set default text size to 2147483647 for TinyTDS connections. Fixes #181 - -* Set @config ivar for 3rd party libs. Fixes #177 - -* Make #sql_type_for_statement work for integers that may have empty parens or none at all. Fixes #175 - - -* 3.2.1 * - -* Add explicit order-by clause for windowed results. Fixes #161. - - -* 3.2.0 * - -* ActiveRecord explain (SHOWPLAN) support. - http://youtu.be/ckb3YYZZZ2Q - -* Remove our log_info_schema_queries config since we are not hooking properly into AR's 'SCHEMA' names. - -* Properly use 'SCHEMA' name arguement in DB statements to comply with ActiveRecord::ExplainSubscriber::IGNORED_PAYLOADS. - -* Make use of the new ConnectionAdapters::SchemaCache for our needs. - -* New Sqlserver::Utils class for out helpers. Moved table name unquotes there. - - -* 3.1.5 * - -* Better support for orders with an expression. Fixes #155. [Jason Frey, Joe Rafaniello] - - -* 3.1.4 * - -* Use INFORMATION_SCHEMA.KEY_COLUMN_USAGE for schema reflection speed. - Fixes #125. [Wüthrich Hannes @hwuethrich] - -* New deadlock victim retry using the #retry_deadlock_victim config. [Jason Frey, Joe Rafaniello] - -* Renamed #with_auto_reconnect to #with_sqlserver_error_handling now that it handles both dropped - connections and deadlock victim errors. Fixes #150 [Jason Frey, Joe Rafaniello] - -* Add activity_stats method that mimics the SQL Server Activity Monitor. Fixes #146 [Jason Frey, Joe Rafaniello] - -* Add methods for sqlserver's #product_version, #product_level, #edition and include them in inspect. - Fixes #145 [Jason Frey, Joe Rafaniello] - -* Handle statements that cannot be retried on a new database connection by not reconnecting. - Fixes #147 [Jason Frey, Joe Rafaniello] - -* Added connection#spid for debugging. Fixes #144 [Jason Frey, Joe Rafaniello] - -* Add ENV['TEST_FILES'] to Rakefile for easy single case tests. [Jason Frey, Joe Rafaniello] - -* Pass ActiveRecord tests. Made windowed distinct pass all orders to groups. - - test_limited_eager_with_multiple_order_columns - - test_limited_eager_with_order - -* Pass AR tests by moving DISTINCT to GROUP BY in windowed SQL. - - test_count_eager_with_has_many_and_limit_and_high_offset - - test_eager_with_has_many_and_limit_and_high_offset - - -* 3.1.3 * - -* Distinguish between identity and primary key key columns during schema reflection. Allows us - us to only do identity inserts when technically needed. Fixes #139 [chadcf] & [joncanady] - - -* 3.1.2 * - -* Fix SQL Azure conflicts with DBCC useroptions. Use new #user_options_xyz methods. [kazamachi] - -* Fix identity inserts for tables with natural PKs. [Gian Carlo Pace] - -* Create a #configure_connection method that can be overridden. Think "SET TEXTSIZE...". - -* Create a #configure_application_name method that can be overridden for unique TinyTDS app names - -* Fixed the #finish_statement_handle to cancel the TinyTDS connection if needed. - - -* 3.1.1 * - -* Make #rollback_db_transaction smarter. - -* Provide a method to override for the quoted string prefix. Not a config because trumping this method will - have drastically bad results. Fixes #124 - -* Allow :limit/:offset to be used with fully qualified table and column in :select. - - -* 3.1.0 * - -* Add support/test around handling of float/real column types [Lucas Maxwell] - -* Make auto reconnect duration configurable. 
Fixes #109 [David Chelimsky] - -* Quote most time objects to use ISO8601 format to be multi-language dateformat compatible. The [datetime] data type is - automatically limited to milliseconds while [time] & [datetimeoffset] have full support. Even included a Date/Time - ActiveSupport formatter that is used per the language settings of the connection. - -* Include a visit_Arel_Nodes_UpdateStatement method in our Arel visitor to add a limit/top for update - that has order and no limit/top. https://github.com/rails/rails/commit/787194ee43ab1fb0a7dc8bfbbfbd5079b047d833 - -* Allow drop_database to be called even when DB does not exist. - -* Remove totally broken ADONET connection mode. Want it back, submit a patch. - -* Schema reflection now finds primary key for all occasions. Fixed #60 [Boško Ivanišević] - -* Allow complex order objects to not be molested by our visitor overrides. Fixes #99 - -* Default unicode datatypes! - -* New #lowercase_schema_reflection configuration that allows you to downcase all tables and columns. - Good for legacy databases. Fixes #86. Thanks @dmajkic. - -* Rails 3.1 with prepared statement support. Uses "EXEC sp_executesql ..." for just about everything now. - - -* 3.0.15 * - -* Way better schema support! Thanks to @ianic! Fixes #61 - -* Warn of possible permission problems if "EXEC sp_helptext..." does not work view. Fixes #73. - - -* 3.0.13/3.0.14 * - -* Allow TinyTDS/DBLIB mode to pass down :host/:port config options. - - -* 3.0.12 * - -* Bug fix for previous TinyTDS lost connections. - - -* 3.0.11 * - -* Azure compatibility. - -* TinyTDS enhancements for lost connections. Default connection mode. - - -* 3.0.10 * - -* Fix #rowtable_orders visitor helper to use first column if no pk column was found. - -* Flatten sp_helpconstraint when looking for constraints just in case fks are present. Issue #64. - -* Start to support 2011 code named "Denali". - -* Limit and Offset can take SqlLiteral objects now. - - -* 3.0.9 * - -* Fix array literal parsing bug for ruby 1.9. - - -* 3.0.8 * - -* Support for ActiveRecord v3.0.3 and ARel v2.0.7 - - -* 3.0.7 * - -* Properly quote table names when reflecting on views. - -* Add "dead or not enabled" to :dblib's lost connection messages. - - -* 3.0.6 * - -* Maintenance release. Lock down to ActiveRecord 3.0.1 using ARel 1.0.0. - - -* 3.0.5 * - -* Fixed native database type memoization, now at connection instance level. Fix #execute_procedure for :dblib mode to return indifferent access rows too. - -* Make login timeout and query timeout backward database.yml friendly for :dblib mode. - - -* 3.0.4 * - -* Add multiple results set support with #execute_procedure for :dblib mode. [Ken Collins] - -* Simplify encoding support. [Ken Collins] - -* Add binary timestamp datatype handling. [Erik Bryn] - - -* 3.0.3 - -* Add TinyTDS/dblib connection mode. [Ken Collins] - - -* 3.0.2 - -* Fix DSN'less code. [Erik Bryn] - - -* 3.0.1 - -* Support DSN'less connections. Resolves ticket 38. - -* Support upcoming ruby odbc 0.99992 - - -* 3.0.0 - -* Release rails 3 version! - - -* 2.3.8 - -* Properly quote all database names in rake helper methods. [Ken Collins] - - -* 2.3.7 - -* Correctly use :date/:time SQL types in 2008 [Ken Collins] - - -* 2.3.6 - -* Allow DNS's to not contain a database and use what is in database.yml [Marco Mastrodonato] - -* Rake tasks methods for vanilla rails :db namespace parity. 
[Ken Collins] - -* IronRuby integrated security fixes [Jimmy Schementi] - - -* 2.3.5 - -* Initial IronRuby ADONET connection mode support baked right in. Removed most &block - parameters, no handle/request object yielded anymore. Better abstraction and compliance - per the ActiveRecord abstract adapter to not yielding handles for #execute and only for - low level #select. Better wrapping of all queries at lowest level in #log so exceptions - at anytime can be handled correctly by core AR. Critical for System::Data's command - readers. Better abstraction for introspecting on #connection_mode. Added support for - running singular test cases via TextMate's Command-R. [Ken Collins] - -* Force a binary encoding on values coming in and out of those columns for ruby 1.9. - Fixes ticket #33 [Jeroen Zwartepoorte] - -* Using change_column will leave default if the type does not change or a new default - is not included. Fixes issue #22. [Ransom Briggs] - -* Use correct SP name for sp_MSforeachtable so any collation can get to it. [7to3] - -* Qualify INFORMATION_SCHEMA.COLUMNS with a correct period DB name if present. - -* Allow adapter to return multiple results sets, for example from stored procedures. [Chris Hall] - - -* 2.3.4 - -* For tables that named with schema(ex. rails.users), they could not get length of column. - column of varchar(40) gets length => nil. Ticket #27 & #15 [Ken Tachiya] - -* Altered limited_update_conditions regex conditions, the .* would greedily fail - if the where_sql had WHERE in a table or field, etc. [Ransom Briggs] - -* Changing test to allow ENV['ARUNIT_DB_NAME'] as the database name for the test units. - Matches up with AR conventions. [Ransom Briggs] - - -2.3.3 - -* Revert #ad83df82 and again cache column information at the connection's instance. The - previous commit was causing all sorts of view and schema reflection problems. [Ken Collins] - - -2.3.2 - -* Insert queries that include the word "insert" as a partial column name with the word - "id" as a value were falsely being matched as identity inserts. [Sean Caffery/bfabry] - -* Delegate all low level #raw_connection calls to #raw_connection_run and #raw_connection_do - which abstract out the low level modes in the connection options at that point. [Ken Collins] - -* Remove DBI dependency and go straight ODBC for speed improvement [Erik Bryn] - -* Leave order by alone when same column crosses two tables [Ransom Briggs] - - -* 2.3 * (December 1st, 2009) - -* Table and column aliases can handle many. Resolves ticket #19 [stonegao] - -* Coerce a few tests that were failing in 2.3.x [Ken Collins] - -* Change column/view cache to happen at class level. Allows connection pool to share same - caches as well as the ability to expire the caches when needed. Also fix change_column so - that exceptions are not raised when the column contains an existing default. [Ken Collins] - -* Allow query_requires_identity_insert? method to return quoted table name in situations where the - INSERT parts are not quoted themselves. [Gary/iawgens, Richard Penwell, Ken Collins] - -* Fixed namespace in calling test_sqlserver_odbc within test_unicode_types. [Gary/iawgens] - -* Columns with multi-line defaults work correctly. [bfabry] - - -* 2.2.22 * (October 15th, 2009) - -* Support Identity-key-column judgement on multiple schema environment [Ken Tachiya] - -* Add support for tinyint data types. In MySQL all these types would be boolean, however in - our adapter, they will use the full 1 => 255 Fixnum value as you would expect. 
[Ken Collins] - - -* 2.2.21 * (September 10th, 2009) - -* Changes for gem best practices per http://weblog.rubyonrails.org/2009/9/1/gem-packaging-best-practices - Details of such are as follows: [Ken Collins] - - Removed rails-sqlserver-2000-2005-adapter.rb load file for old github usage. - - Move the core_ext directory to active_record/connection_adapters/sqlserver_adapter/core_ext - - Renamespace SQLServerDBI to ActiveRecord::ConnectionAdapters::SQLServerCoreExtensions::DBI - - Renamespace ActiveRecord::ConnectionAdapters::SQLServerActiveRecordExtensions to ActiveRecord::ConnectionAdapters::SQLServerCoreExtensions::ActiveRecord - - -* 2.2.20 * (September 10th, 2009) - -* Implement a new remove_default_constraint method that uses sp_helpconstraint [Ken Collins] - -* Use a lazy match in add_order_by_for_association_limiting! to allow sub selects to be used. Resolves - ticket #11. - -* Add default rake task back for testing. Runs the namespaced sqlserver:test_sqlserver_odbc. - Resolves ticket #10 [Ken Collins] - -* Default value detection in column_definitions is kinder to badly formatted, or long winded user - defined functions, for default values. Resolves ticket #8 [Ken Collins] - -* Make sure bigint SQL Server data type can be used and converted back to Bignum as expected. [Ken Collins] - - -* 2.2.19 * (June 19th, 2009) - -* Leave quoted column names as is. Resolves ticket #36 [Vince Puzzella] - -* Changing add_limit! in ActiveRecord::Base for SQLServer so that it passes through any scoped :order - parameters. Resolves ticket #35 [Murray Steele] - - -* 2.2.18 * (June 5th, 2009) - -* Column reflection on table name rescues LoadError and a few others. Resolves tickets #25 & #33 [Ken Collins] - -* Added 2008 support. Resolves ticket #32 [Ken Collins] - - -* 2.2.17 * (May 14th, 2009) - -* Add simplified type recognition for varchar(max) and nvarchar(max) under SQL Server 2005 to be a - :text type. This ensures schema dumper does the right thing. Fixes ticket #30. [Ken Collins] - -* Tested ruby 1.9, ruby-odbc 0.9996, and DBI 0.4.1. Also added correct support for UTF-8 character - encoding going in and out of the DB. See before gist http://gist.github.com/111709 and after gist - http://gist.github.com/111719 [Ken Collins] - - -* 2.2.16 * (April 21st, 2009) - -* Make add_limit_offset! only add locking hints (for tally) when the :lock option is present. Added tests - to make sure tally SQL is augmented correctly and tests to make sure that add_lock! is doing what it needs - for deep sub selects in paginated results. [Ken Collins] - -* Add auto reconnect support utilizing a new #with_auto_reconnect block. By default each query run through - the adapter will automatically reconnect at standard intervals, logging attempts along the way, till success - or the original exception bubbles up. See docs for more details. Resolves ticket #18 [Ken Collins] - -* Update internal helper method #orders_and_dirs_set to cope with an order clause like "description desc". This - resolves ticket #26 [Ken Collins] - -* Provide support for running queries at different isolation levels using #run_with_isolation_level method - that can take a block or not. Also implement a #user_options method that reflects on the current user - session values. Resolves #20 [Murray Steele] - - -* 2.2.15 * (March 23rd, 2009) - -* Better add_lock! method that can add the lock to just about all the elements in the statement. This - could be eager loaded associations, joins, etc. 
Done so that paginated results can easily add lock - options for performance. Note, the tally count in add_limit_offset! use "WITH (NOLOCK)" explicitly - as it can not hurt and is needed. [Ken Collins] - - -* 2.2.14 * (March 17th, 2009) - -* Rails2.3 - Back passing tests on 2.2 work. Includes: (1) Created new test helpers that check ActiveRecord - version strings so we can conditionally run 2.2 and 2.3 tests. (2) Making TransactionTestSqlserver use Ship vs - Bird model. Also made it conditional run a few blocks for different versions of ActiveRecord. (3) Previous - JoinDependency#aliased_table_name_for is now only patched in ActiveRecord equal or greater than 2.3. [Ken Collins] - -* Rails2.3 - Implement new savepoint support [Ken Collins] - http://rails.lighthouseapp.com/projects/8994/tickets/383 - http://www.codeproject.com/KB/database/sqlservertransactions.aspx - -* Rails2.3 - Coerce NestedScopingTest#test_merged_scoped_find to use correct regexp for adapter. [Ken Collins] - -* Rails2.3 - Implement a custom ActiveRecord::Associations::ClassMethods::JoinDependency::JoinAssociation#aliased_table_name_for - method that uses a Regexp.escape so that table/column quoting does not get ignored. [Ken Collins] - -* Rails2.3 - Implement #outside_transaction? and a new transaction test case to test some SQL Server - basic support while implementing this method. Future home of some savepoint tests too. [Ken Collins] - -* Rails2.3 - Coerced tests that ensure hash conditions on referenced tables are considered when eager - loading with limit/offset. Information on these changes and the ticket in rails are. - http://github.com/rails/rails/commit/9a4d557713acb0fc8e80f61af18094034aca029a - http://rails.lighthouseapp.com/projects/8994-ruby-on-rails/tickets/1404-conditions_tables-doesnt-understand-condition-hashes - -* Rails2.3 - Add coerced tests for true/false attributes in selects use SQL Server case statement. [Ken Collins] - -* Making sure that smalldatetime types are OK to use. Also fixed a bug in the #view_information method that - checks to see if a view definition is equal to 4000 chars, meaning that it is most likely truncated and - needs to use the backup method of sp_helptext to get it's view definition. [Ken Collins] - - -* 2.2.13 * (February 10th, 2009) - -* Update #indexes to use unqualified table name. Fixes cases where users may decide to use table - name prefixes like 'dbo.'. [Ken Collins] - - -* 2.2.12 * (February 8th, 2009) - -* Update table_exists? to work with qualified table names that may include an user prefix. [Ken Collins] - - -* 2.2.10/11 * (January 22nd, 2009) - -* Add a rails-sqlserver-2000-2005-adapter.rb file so that long :lib option for config.gem is no longer needed. - - -* 2.2.9 * (January 22nd, 2009) - -* Fixing a small bug in the deprecated DBI::Timestamp conversion so it correctly converts nanosecond whole - numbers to back to pre type cast SQL Server milliseconds, ultimately allow ruby's Time#usec which is - microseconds to be correct. [Ken Collins] - -* Sometimes views are more than 4000 chars long and will return NULL for the VIEW_DEFINITION. If so, use - sp_helptext procedure as a backup method. [Ken Collins] - - -* 2.2.8 (January 9th, 2009) - -* Update execute_procedure method a bit to remove excess code. [Ken Collins] - - -* 2.2.7 (January 9th, 2009) - -* Created a connection#execute_procedure method that takes can take any number of ruby objects as variables - and quotes them according to the connection's rules. 
Also added an ActiveRecord::Base class level core - extension that hooks into this. It also checks if the connection responds to #execute_procedure and if - not returns an empty array. [Ken Collins] - -* Added a #enable_default_unicode_types class attribute access to make all new added or changed string types - like :string/:text default to unicode/national data types. See the README for full details. Added a rake - task that assists setting this to true when running tests. [Ken Collins] - - -* 2.2.6 (January 8th, 2009) - -* Introduced a bug in 2.2.5 in the #add_order! core ext for ActiveRecord. Fixed [Ken Collins] - - -* 2.2.5 (January 4th, 2009) - -* Added a log_info_schema_queries class attribute and make all queries to INFORMATION_SCHEMA silent by - default. [Ken Collins] - -* Fix millisecond support in datetime columns. ODBC::Timestamp incorrectly takes SQL Server milliseconds - and applies them as nanoseconds. We cope with this at the DBI layer by using SQLServerDBI::Type::SqlserverTimestamp - class to parse the before type cast value appropriately. Also update the adapters #quoted_date method - to work more simply by converting ruby's #usec milliseconds to SQL Server microseconds. [Ken Collins] - -* Core extensions for ActiveRecord now reflect on the connection before doing SQL Server things. Now - this adapter is compatible for using with other adapters. [Ken Collins] - - -* 2.2.4 (December 5th, 2008) - -* Fix a type left in #views_real_column_name. Also cache #view_information lookups. [Ken Collins] - - -* 2.2.3 (December 5th, 2008) - -* Changing back to using real table name in column_definitions. Makes sure views get back only the columns - that are defined for them with correct names, etc. Now supporting views by looking for NULL default and - then if table name is a view, perform a targeted with sub select to the real table name and column name - to find true default. [Ken Collins] - -* Ensure that add_limit_offset! does not alter sub queries. [Erik Bryn] - - -2.2.2 (December 2nd, 2008) - -* Add support for view defaults by making column_definitions use real table name for schema info. [Ken Collins] - -* Include version in connection method and inspection. [Ken Collins] - - -2.2.1 (November 25th, 2008) - -* Add identity insert support for views. Cache #views so that identity #table_name_or_views_table_name - will run quickly. [Ken Collins] - -* Add views support. ActiveRecord classes can use views. The connection now has a #views method and - #table_exists? will now fall back to checking views too. [Ken Collins] - - -2.2.0 (November 21st, 2008) - -* Release for rails 2.2.2. Many many changes. [Ken Collins], [Murray Steele], [Shawn Balestracci], [Joe Rafaniello] - diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 000000000..d345aff8d --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,9 @@ +## Unreleased + +#### Added + +#### Changed + +#### Fixed + +Please check [8-1-stable](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/blob/8-1-stable/CHANGELOG.md) for previous changes. diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..4a4d8e858 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,31 @@ +Contributor Code of Conduct + +As contributors and maintainers of this project, we pledge to respect all +people who contribute through reporting issues, posting feature requests, +updating documentation, submitting pull requests or patches, and other +activities. 
+ +We are committed to making participation in this project a harassment-free +experience for everyone, regardless of level of experience, gender, gender +identity and expression, sexual orientation, disability, personal appearance, +body size, race, ethnicity, age, or religion. + +Examples of unacceptable behavior by participants include the use of sexual +language or imagery, derogatory comments or personal attacks, trolling, public +or private harassment, insults, or other unprofessional conduct. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct. Project maintainers who do not +follow the Code of Conduct may be removed from the project team. + +This code of conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by opening an issue or contacting one or more of the project +maintainers. + +This Code of Conduct is adapted from the Contributor Covenant +(http://contributor-covenant.org), version 1.1.0, available at +http://contributor-covenant.org/version/1/1/0/ diff --git a/Dockerfile.ci b/Dockerfile.ci new file mode 100644 index 000000000..74f636508 --- /dev/null +++ b/Dockerfile.ci @@ -0,0 +1,14 @@ +ARG TARGET_VERSION=3.2.1 + +FROM ghcr.io/rails-sqlserver/activerecord-sqlserver-adapter:${TARGET_VERSION} + +ENV WORKDIR /activerecord-sqlserver-adapter + +RUN mkdir -p $WORKDIR +WORKDIR $WORKDIR + +COPY . $WORKDIR + +RUN RAILS_BRANCH=main bundle install --jobs `expr $(cat /proc/cpuinfo | grep -c "cpu cores") - 1` --retry 3 + +CMD ["sh"] diff --git a/Gemfile b/Gemfile index 20be78a73..cbb406565 100644 --- a/Gemfile +++ b/Gemfile @@ -1,46 +1,74 @@ +# frozen_string_literal: true -source :rubygems +source "https://rubygems.org" -if ENV['RAILS_SOURCE'] - gemspec :path => ENV['RAILS_SOURCE'] +git_source(:github) { |repo| "https://github.com/#{repo}.git" } + +gemspec + +gem "bcrypt" +gem "pg", "1.5.9" +gem "sqlite3", ">= 2.1" +gem "tzinfo-data", platforms: [:mingw, :mswin, :x64_mingw, :jruby] +gem "benchmark-ips" +gem "minitest", ">= 5.15.0" +gem "msgpack", ">= 1.7.0" + +if ENV["RAILS_SOURCE"] + gemspec path: ENV["RAILS_SOURCE"] +elsif ENV["RAILS_BRANCH"] + gem "rails", github: "rails/rails", branch: ENV["RAILS_BRANCH"] +elsif ENV["RAILS_COMMIT"] + gem "rails", github: "rails/rails", ref: ENV["RAILS_COMMIT"] else - version = ENV['RAILS_VERSION'] || begin - require 'net/http' - spec = eval(File.read('activerecord-sqlserver-adapter.gemspec')) - version = spec.dependencies.detect{ |d|d.name == 'activerecord' }.requirement.requirements.first.last.version - major, minor, tiny = version.split('.') - uri = URI.parse "http://rubygems.org/api/v1/versions/activerecord.yaml" - YAML.load(Net::HTTP.get(uri)).select do |data| - a, b, c = data['number'].split('.') - !data['prerelease'] && major == a && minor == b - end.first['number'] - end - gem 'rails', :git => "git://github.com/rails/rails.git", :tag => "v#{version}" -end + # Need to get rails source because the gem doesn't include tests + version = ENV["RAILS_VERSION"] || begin + require "openssl" + require "net/http" + require "yaml" -if ENV['AREL'] - gem 'arel', :path => ENV['AREL'] + spec = Gem::Specification.load("activerecord-sqlserver-adapter.gemspec") + ver = spec.dependencies.detect { |d| d.name == "activerecord" 
}.requirement.requirements.first.last.version + major, minor, _tiny, pre = ver.split(".") + + if pre + ver + else + uri = URI.parse("https://rubygems.org/api/v1/versions/activerecord.yaml") + http = Net::HTTP.new(uri.host, uri.port) + http.use_ssl = true + http.verify_mode = OpenSSL::SSL::VERIFY_NONE + YAML.load(http.request(Net::HTTP::Get.new(uri.request_uri)).body).find do |data| + a, b, = data["number"].split(".") + !data["prerelease"] && major == a && (minor.nil? || minor == b) + end["number"] + end + end + gem "rails", github: "rails/rails", tag: "v#{version}" end +# rubocop:disable Bundler/DuplicatedGem group :tinytds do - if ENV['TINYTDS_SOURCE'] - gem 'tiny_tds', :path => ENV['TINYTDS_SOURCE'] + if ENV["TINYTDS_SOURCE"] + gem "tiny_tds", path: ENV["TINYTDS_SOURCE"] + elsif ENV["TINYTDS_VERSION"] + gem "tiny_tds", ENV["TINYTDS_VERSION"] else - gem 'tiny_tds', '0.5.1' + gem "tiny_tds" end end +# rubocop:enable Bundler/DuplicatedGem -group :odbc do - gem 'ruby-odbc' +group :development do + gem "minitest-spec-rails" + gem "mocha" + gem "pry-byebug", platform: [:mri, :mingw, :x64_mingw] end -group :development do - gem 'bcrypt-ruby', '~> 3.0.0' - gem 'bench_press' - gem 'm' - gem 'mocha', '0.9.8' - gem 'nokogiri' - gem 'rake', '~> 0.9.2' - gem 'shoulda', '2.10.3' +group :guard do + gem "guard" + gem "guard-minitest" + gem "terminal-notifier-guard" if /darwin/.match?(RbConfig::CONFIG["host_os"]) end +gem "standard", require: false diff --git a/Guardfile b/Guardfile new file mode 100644 index 000000000..17f008db2 --- /dev/null +++ b/Guardfile @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +require_relative "test/support/paths_sqlserver" + +clearing :on +notification :terminal_notifier if defined?(TerminalNotifier) +ignore %r{debug\.log} + +ar_lib = File.join ARTest::SQLServer.root_activerecord, "lib" +ar_test = File.join ARTest::SQLServer.root_activerecord, "test" + +guard :minitest, { + all_on_start: false, + autorun: false, + include: ["lib", "test", ar_lib, ar_test], + test_folders: ["test"], + test_file_patterns: ["*_test.rb", "*_test_sqlserver.rb"] +} do + # Our project watchers. + if ENV["TEST_FILES"] + ENV["TEST_FILES"].split(",").map(&:strip).each do |file| + watch(%r{.*}) { file } + end + else + watch(%r{^test/cases/\w+_test_sqlserver\.rb$}) + watch(%r{^test/cases/coerced_tests\.rb$}) { "test/cases/coerced_tests.rb" } + watch(%r{^lib/active_record/connection_adapters/sqlserver/([^/]+)\.rb$}) { |m| "test/cases/#{m[1]}_test_sqlserver.rb" } + watch(%r{^test/cases/helper_sqlserver\.rb$}) { "test" } + end +end diff --git a/MIT-LICENSE b/MIT-LICENSE index e75026d4f..c095e0832 100644 --- a/MIT-LICENSE +++ b/MIT-LICENSE @@ -1,5 +1,5 @@ -Copyright (c) 2008-2011 - +Copyright (c) Ken Collins + Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including @@ -7,14 +7,14 @@ without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - + The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/README.md b/README.md index d81f8eca8..e9a712787 100644 --- a/README.md +++ b/README.md @@ -1,175 +1,142 @@ -# ActiveRecord SQL Server Adapter. For SQL Server 2005 And Higher. +# ActiveRecord SQL Server Adapter -The SQL Server adapter for ActiveRecord. If you need the adapter for SQL Server 2000, you are still in the right spot. Just install the latest 2.3.x version of the adapter. Note, we follow a rational versioning policy that tracks ActiveRecord. That means that our 2.3.x version of the adapter is only for the latest 2.3 version of Rails. We also have stable branches for each major/minor release of ActiveRecord. +* [![CI](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/actions/workflows/ci.yml/badge.svg)](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/actions/workflows/ci.yml) - CI +* [![Gem Version](http://img.shields.io/gem/v/activerecord-sqlserver-adapter.svg)](https://rubygems.org/gems/activerecord-sqlserver-adapter) - Gem Version +* [![Gitter chat](https://img.shields.io/badge/%E2%8A%AA%20GITTER%20-JOIN%20CHAT%20%E2%86%92-brightgreen.svg?style=flat)](https://gitter.im/rails-sqlserver/activerecord-sqlserver-adapter) - Community +## About The Adapter -## What's New +The SQL Server adapter for ActiveRecord using SQL Server 2012 or higher. -* Rails 3.2 support. With explain (SHOWPLAN) support. -* Deadlock victim retry logic using the #retry_deadlock_victim config. -* Proper interface to configure the connection and TinyTDS app name reported to SQL Server. -* Rails 3.1 prepared statement support leverages cached query plans. - If you use DBLIB/TinyTDS, you must use FreeTDS 0.91 !!!!! - https://github.com/rails-sqlserver/tiny_tds/issues/41 -* We now support your native language date/time formats automatically! -* Default unicode datatypes! Disable with #enable_default_unicode_types to false. -* New #lowercase_schema_reflection configuration option for legacy DBs. -* New dblib connection mode using TinyTDS! Default mode too! +We follow a rational versioning policy that tracks Rails. That means that our 7.x version of the adapter is only +for the latest 7.x version of Rails. We also have stable branches for each major/minor release of ActiveRecord. +We support the versions of the adapter that are in the Rails [Bug Fixes](https://rubyonrails.org/maintenance) +maintenance group. -#### Testing Rake Tasks Support +See [Rubygems](https://rubygems.org/gems/activerecord-sqlserver-adapter/versions) for the latest version of the adapter for each Rails release. -This is a long story, but if you are not working with a legacy database and you can trust your schema.rb to setup your local development or test database, then we have adapter level support for rails :db rake tasks. Please read this wiki page for full details. 
+| Adapter Version | Rails Version | Support | Branch | +|-----------------|---------------|----------------|----------------------------------------------------------------------------------------------------| +| Unreleased | `8.2.x` | In Development | [main](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/tree/main) | +| `8.1.x` | `8.1.x` | Active | [8-1-stable](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/tree/8-1-stable) | +| `8.0.x` | `8.0.x` | Active | [8-0-stable](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/tree/8-0-stable) | +| `7.2.x` | `7.2.x` | Ended | [7-2-stable](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/tree/7-2-stable) | +| `7.1.x` | `7.1.x` | Ended | [7-1-stable](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/tree/7-1-stable) | +| `7.0.x` | `7.0.x` | Ended | [7-0-stable](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/tree/7-0-stable) | +| `6.1.x` | `6.1.x` | Ended | [6-1-stable](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/tree/6-1-stable) | +| `6.0.x` | `6.0.x` | Ended | [6-0-stable](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/tree/6-0-stable) | +| `5.2.x` | `5.2.x` | Ended | [5-2-stable](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/tree/5-2-stable) | +| `5.1.x` | `5.1.x` | Ended | [5-1-stable](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/tree/5-1-stable) | +| `4.2.x` | `4.2.x` | Ended | [4-2-stable](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/tree/4-2-stable) | +| `4.1.x` | `4.1.x` | Ended | [4-1-stable](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/tree/4-1-stable) | -http://wiki.github.com/rails-sqlserver/activerecord-sqlserver-adapter/rails-db-rake-tasks +#### Native Data Type Support -#### Date/Time Data Type Hinting +We support every data type supported by FreeTDS. All simplified Rails types in migrations will correspond to a matching SQL Server national (unicode) data type. Always check the `NATIVE_DATABASE_TYPES` [(here)](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/blob/master/lib/active_record/connection_adapters/sqlserver_adapter.rb) for an updated list. -SQL Server 2005 does not include a native data type for just `date` or `time`, it only has `datetime`. To pass the ActiveRecord tests we implemented two simple class methods that can teach your models to coerce column information to be cast correctly. Simply pass a list of symbols to either the `coerce_sqlserver_date` or `coerce_sqlserver_time` methods that correspond to 'datetime' columns that need to be cast correctly. +The following types (`date`, `datetime2`, `datetimeoffset`, `time`) all require TDS version `7.3` with TinyTDS. We recommend using FreeTDS 1.0 or higher which default to using `TDSVER` to `7.3`. The adapter also sets TinyTDS's `tds_version` to this as well if non is specified. -```ruby -class Topic < ActiveRecord::Base - coerce_sqlserver_date :last_read - coerce_sqlserver_time :bonus_time -end -``` +The adapter supports ActiveRecord's `datetime_with_precision` setting. This means that passing `:precision` to a datetime column is supported. -This implementation has some limitations. To date we can only coerce date/time types for models that conform to the expected ActiveRecord class to table naming conventions. So a table of 'foo_bar_widgets' will look for coerced column types in the FooBarWidget class. 
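To make the datetime precision rules described in the updated README concrete, here is a minimal migration sketch; the `posts` table and both column names are hypothetical and only illustrate the default-versus-`nil` precision behaviour.

```ruby
class AddTimestampsToPosts < ActiveRecord::Migration[8.1]
  def change
    # No precision given: the adapter assumes precision 6 and creates a datetime2(6) column.
    add_column :posts, :published_at, :datetime

    # Explicit nil precision: a classic datetime column is created instead of datetime2.
    add_column :posts, :legacy_published_at, :datetime, precision: nil
  end
end
```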
+By default, precision 6 is used for `:datetime` types if precision is not specified. Any non-nil precision will tell +the adapter to use the `datetime2` column type. To create a `datetime` column using a migration a precision of `nil` +should be specified, otherwise the precision will default to 6 and a `datetime2` column will be created. -#### Executing Stored Procedures +#### Identity Inserts with Triggers -Every class that sub classes ActiveRecord::Base will now have an execute_procedure class method to use. This method takes the name of the stored procedure which can be a string or symbol and any number of variables to pass to the procedure. Arguments will automatically be quoted per the connection's standards as normal. For example: +The adapter uses `OUTPUT INSERTED` so that we can select any data type key, for example UUID tables. However, this poses a problem with tables that use triggers. The solution requires that we use a more complex insert statement which uses a temporary table to select the inserted identity. To use this format you must declare your table exempt from the simple output inserted style with the table name into a concurrent hash. Optionally, you can set the data type of the table's primary key to return. ```ruby -Account.execute_procedure :update_totals, 'admin', nil, true -# Or with named parameters. -Account.execute_procedure :update_totals, :named => 'params' -``` +adapter = ActiveRecord::ConnectionAdapters::SQLServerAdapter -#### Native Data Type Support +# Will assume `bigint` as the id key temp table type. +adapter.exclude_output_inserted_table_names['my_table_name'] = true -Currently the following custom data types have been tested for schema definitions. +# Explicitly set the data type for the temporary key table. +adapter.exclude_output_inserted_table_names['my_uuid_table_name'] = 'uniqueidentifier' -* char -* nchar -* nvarchar -* ntext -* varchar(max) -* nvarchar(max) -For example: - -```ruby -create_table :sql_server_custom_types, :force => true do |t| - t.column :ten_code, :char, :limit => 10 - t.column :ten_code_utf8, :nchar, :limit => 10 - t.column :title_utf8, :nvarchar - t.column :body, :varchar_max # Creates varchar(max) - t.column :body_utf8, :ntext - t.column :body2_utf8, :nvarchar_max # Creates nvarchar(max) -end +# Explicitly set data types when data type is different for composite primary keys. +adapter.exclude_output_inserted_table_names['my_composite_pk_table_name'] = { pk_col_one: "uniqueidentifier", pk_col_two: "int" } ``` -Manually creating a `varchar(max)` is not necessary since this is the default type created when specifying a `:text` field. As time goes on we will be testing other SQL Server specific data types are handled correctly when created in a migration. - - -#### Native Text/String/Binary Data Type Accessor -To pass the ActiveRecord tests we had to implement an class accessor for the native type created for `:text` columns. By default any `:text` column created by migrations will create a `varchar(max)` data type. This type can be queried using the SQL = operator and has plenty of storage space which is why we made it the default. If for some reason you want to change the data type created during migrations you can configure this line to your liking in a config/initializers file. - -```ruby -ActiveRecord::ConnectionAdapters::SQLServerAdapter.native_text_database_type = 'varchar(8000)' -``` +#### Force Schema To Lowercase -Also, there is a class attribute setter for the native string database type. 
This is the same for all SQL Server versions, `varchar`. However it can be used instead of the #enable_default_unicode_types below for finer grain control over which types you want unicode safe when adding or changing the schema. +Although it is not necessary, the Ruby convention is to use lowercase method names. If your database schema is in upper or mixed case, we can force all table and column names during the schema reflection process to be lowercase. Add this to your config/initializers file for the adapter. ```ruby -ActiveRecord::ConnectionAdapters::SQLServerAdapter.native_string_database_type = 'nvarchar' +ActiveRecord::ConnectionAdapters::SQLServerAdapter.lowercase_schema_reflection = true ``` -By default any :binary column created by migrations will create a `varbinary(max)` data type. This too can be set using an initializer. -```ruby -ActiveRecord::ConnectionAdapters::SQLServerAdapter.native_binary_database_type = 'image' -``` - -#### Setting Unicode Types As Default +#### Schemas & Users -By default the adapter will use unicode safe data types for `:string` and `:text` types when defining/changing the schema! This was changed in version 3.1 since it is about time we push better unicode support and since we default to TinyTDS (DBLIB) which supports unicode queries and data. If you choose, you can set the following class attribute in a config/initializers file that will disable this behavior. +Depending on your user and schema setup, it may be needed to use a table name prefix of `dbo.`. So something like this in your initializer file for ActiveRecord or the adapter. ```ruby -# Default -ActiveRecord::ConnectionAdapters::SQLServerAdapter.enable_default_unicode_types = true -ActiveRecord::ConnectionAdapters::SQLServerAdapter.native_text_database_type = 'nvarchar(max)' -ActiveRecord::ConnectionAdapters::SQLServerAdapter.native_string_database_type = 'nvarchar' - -# Disabled -ActiveRecord::ConnectionAdapters::SQLServerAdapter.enable_default_unicode_types = false -ActiveRecord::ConnectionAdapters::SQLServerAdapter.native_text_database_type = 'varchar(max)' -ActiveRecord::ConnectionAdapters::SQLServerAdapter.native_string_database_type = 'varchar' +ActiveRecord::Base.table_name_prefix = 'dbo.' ``` -It is important to remember that unicode types in SQL Server have approximately half the storage capacity as their counter parts. So where a normal string would max out at (8000) a unicode string will top off at (4000). - - -#### Deadlock Victim Retry - -In a config initializer, you can configure the adapter to retry deadlock victims' SQL. Note, this relies on us copying ActiveRecord's `#transaction` method and can be brittle when upgrading. If you think that our version of `#transaction` is out of sync with the version of rails in our gemspec, please open a ticket and let us know. Our custom transaction method can be found in `activerecord/connection_adapters/sqlserver/core_ext/database_statements.rb`. +It's also possible to create/change/drop a schema in the migration file as in the example below: ```ruby -ActiveRecord::ConnectionAdapters::SQLServerAdapter.retry_deadlock_victim = true -``` +class CreateFooSchema < ActiveRecord::Migration[7.0] + def up + create_schema('foo') + # Or you could move a table to a different schema -#### Force Schema To Lowercase - -Although it is not necessary, the Ruby convention is to use lowercase method names. If your database schema is in upper or mixed case, we can force all table and column names during the schema reflection process to be lowercase. 
Add this to your config/initializers file for the adapter. + change_table_schema('foo', 'dbo.admin') + end -```ruby -ActiveRecord::ConnectionAdapters::SQLServerAdapter.lowercase_schema_reflection = true + def down + drop_schema('foo') + end +end ``` -#### Schemas & Users +#### Configure Connection -Depending on your user and schema setup, it may be needed to use a table name prefix of `dbo.`. So something like this in your initializer file for ActiveRecord or the adapter. +The adapter conforms to the AbstractAdapter interface to configure connections. If you require additional connection +configuration then implement the `configure_connection` method in an initializer like so. In the following +example we are setting the `TEXTSIZE` to 64 megabytes. ```ruby -ActiveRecord::Base.table_name_prefix = 'dbo.' +ActiveRecord::ConnectionAdapters::SQLServerAdapter.prepend( + Module.new do + def configure_connection + super + @raw_connection.execute("SET TEXTSIZE #{64.megabytes}").do + end + end +) ``` +#### Configure Application Name -#### Auto Connecting +TinyTDS supports an application name when it logs into SQL Server. This can be used to identify the connection in SQL Server's activity monitor. By default it will use the `appname` from your database.yml file or your Rails::Application name. -By default the adapter will auto connect to lost DB connections. For every query it will retry at intervals of 2, 4, 8, 16 and 32 seconds. During each retry it will callback out to ActiveRecord::Base.did_retry_sqlserver_connection(connection,count). When all retries fail, it will callback to ActiveRecord::Base.did_lose_sqlserver_connection(connection). Both implementations of these methods are to write to the rails logger, however, they make great override points for notifications like Hoptoad. If you want to disable automatic reconnections use the following in an initializer. +Below shows how you might use the database.yml file to use the process ID in your application name. -```ruby -ActiveRecord::ConnectionAdapters::SQLServerAdapter.auto_connect = false +```yaml +development: + adapter: sqlserver + appname: <%= "myapp_#{Process.pid}" %> ``` +#### Executing Stored Procedures -#### Configure Connection & App Name - -We currently conform to an unpublished and non-standard AbstractAdapter interface to configure connections made to the database. To do so, just override the `configure_connection` method in an initializer like so. In this case below we are setting the `TEXTSIZE` to 64 megabytes. Also, TinyTDS supports an application name when it logs into SQL Server. This can be used to identify the connection in SQL Server's activity monitor. By default it will use the `appname` from your database.yml file or a lowercased version of your Rails::Application name. It is now possible to define a `configure_application_name` method that can give you per instance details. Below shows how you might use this to get the process id and thread id of the current connection. +Every class that sub classes ActiveRecord::Base will now have an execute_procedure class method to use. This method takes the name of the stored procedure which can be a string or symbol and any number of variables to pass to the procedure. Arguments will automatically be quoted per the connection's standards as normal. 
For example: ```ruby -module ActiveRecord - module ConnectionAdapters - class SQLServerAdapter < AbstractAdapter - - def configure_connection - raw_connection_do "SET TEXTSIZE #{64.megabytes}" - end - - def configure_application_name - "myapp_#{$$}_#{Thread.current.object_id}".to(29) - end - - end - end -end +Account.execute_procedure(:update_totals, 'admin', nil, true) +# Or with named parameters. +Account.execute_procedure(:update_totals, named: 'params') ``` #### Explain Support (SHOWPLAN) @@ -177,7 +144,7 @@ end The 3.2 version of the adapter support ActiveRecord's explain features. In SQL Server, this is called the showplan. By default we use the `SHOWPLAN_ALL` option and format it using a simple table printer. So the following ruby would log the plan table below it. ```ruby -Car.where(:id => 1).explain +Car.where(id: 1).explain ``` ``` @@ -193,80 +160,145 @@ EXPLAIN for: SELECT [cars].* FROM [cars] WHERE [cars].[id] = 1 You can configure a few options to your needs. First is the max column width for the logged table. The default value is 50 characters. You can change it like so. ```ruby -ActiveRecord::ConnectionAdapters::Sqlserver::Showplan::PrinterTable.max_column_width = 500 +ActiveRecord::ConnectionAdapters::SQLServer::Showplan::PrinterTable.max_column_width = 500 ``` -Another configuration is the showplan option. Some might find the XML format more useful. If you have Nokogiri installed, we will format the XML string. I will gladly accept pathces that make the XML printer more useful! +Another configuration is the showplan option. Some might find the XML format more useful. If you have Nokogiri installed, we will format the XML string. I will gladly accept pathches that make the XML printer more useful! ```ruby ActiveRecord::ConnectionAdapters::SQLServerAdapter.showplan_option = 'SHOWPLAN_XML' ``` +**NOTE:** The method we utilize to make SHOWPLANs work is very brittle to complex SQL. There is no getting around this as we have to deconstruct an already prepared statement for the sp_executesql method. If you find that explain breaks your app, simple disable it. Do not open a github issue unless you have a patch. Please [consult the Rails guides](http://guides.rubyonrails.org/active_record_querying.html#running-explain) for more info. -**NOTE:** The method we utilize to make SHOWPLANs work is very brittle to complex SQL. There is no getting around this as we have to deconstruct an already prepared statement for the sp_executesql method. If you find that explain breaks your app, simple disable it. Do not open a github issue unless you have a patch. To disable explain, just set the threshold to nil. Please [consult the Rails guides](http://guides.rubyonrails.org/active_record_querying.html#running-explain) for more info. Change this setting in your ```config/environments/development.rb```: +#### `insert_all` / `upsert_all` support + +`insert_all` and `upsert_all` on other database system like MySQL, SQlite or PostgreSQL use a clause with their `INSERT` statement to either skip duplicates (`ON DUPLICATE KEY IGNORE`) or to update the existing record (`ON DUPLICATE KEY UPDATE`). Microsoft SQL Server does not offer these clauses, so the support for these two options is implemented slightly different. + +Behind the scenes, we execute a `MERGE` query, which joins your data that you want to insert or update into the table existing on the server. The emphasis here is "JOINING", so we also need to remove any duplicates that might make the `JOIN` operation fail, e.g. 
something like this: ```ruby -config.active_record.auto_explain_threshold_in_seconds = nil +Book.insert_all [ + { id: 200, author_id: 8, name: "Refactoring" }, + { id: 200, author_id: 8, name: "Refactoring" } +] ``` +The removal of duplicates happens during the SQL query. -## Versions +Because of this implementation, if you pass `on_duplicate` to `upsert_all`, make sure to assign your value to `target.[column_name]` (e.g. `target.status = GREATEST(target.status, 1)`). To access the values that you want to upsert, use `source.[column_name]`. -The adapter follows a rational versioning policy that also tracks ActiveRecord's major and minor version. That means the latest 3.1.x version of the adapter will always work for the latest 3.1.x version of ActiveRecord. +#### Computed Columns +The adapter supports computed columns. They can either be virtual `stored: false` (default) and persisted `stored: true`. You can create a computed column in a migration like so: -## Installation +```ruby +create_table :users do |t| + t.string :name + t.virtual :lower_name, as: "LOWER(name)", stored: false + t.virtual :upper_name, as: "UPPER(name)", stored: true + t.virtual :name_length, as: "LEN(name)" +end +``` + +## New Rails Applications + +When creating a new Rails application you need to perform the following steps to connect a Rails application to a +SQL Server instance. + +1. Create new Rails application, the database defaults to `sqlite`. + +```bash +rails new my_app +``` -The adapter has no strict gem dependencies outside of ActiveRecord. You will have to pick a connection mode, the default is dblib which uses the TinyTDS gem. Just bundle the gem and the adapter will use it. +2. Update the Gemfile to install the adapter instead of the SQLite adapter. Remove the `sqlite3` gem from the Gemfile. ```ruby -gem 'tiny_tds' -gem 'activerecord-sqlserver-adapter', '~> 3.1.0' +gem 'activerecord-sqlserver-adapter' ``` -If you want to use ruby ODBC, please use at least version 0.99992 since that contains fixes for both native types as well as fixes for proper encoding support under 1.9. If you have any troubles installing the lower level libraries for the adapter, please consult the wiki pages for various platform installation guides. Tons of good info can be found and we ask that you contribute too! +3. Connect the application to your SQL Server instance by editing the `config/database.yml` file with the username, +password and host of your SQL Server instance. -http://wiki.github.com/rails-sqlserver/activerecord-sqlserver-adapter/platform-installation +Example: +```yaml +development: + adapter: sqlserver + host: 'localhost' + port: 1433 + database: my_app_development + username: 'frank_castle' + password: 'secret' +``` + +## Installation +The adapter has no strict gem dependencies outside of `ActiveRecord` and +[TinyTDS](https://github.com/rails-sqlserver/tiny_tds). + +```ruby +gem 'activerecord-sqlserver-adapter' +``` + +## Reporting an Issue + +Having a way to reproduce your issue will help people confirm, investigate, and ultimately fix your issue. You +can do this by providing an executable test case. To make this process easier, we have prepared a bug report template +for you to use as a starting point at [How to report a bug](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/wiki/How-to-report-a-bug). ## Contributing -If you would like to contribute a feature or bugfix, thanks! To make sure your fix/feature has a high chance of being added, please read the following guidelines. 
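As a follow-up to the `upsert_all` notes above, here is a hedged sketch of how an `on_duplicate` override might be written; the `Book` columns, the `status` value, and the `GREATEST` expression are illustrative only, and the SQL snippet is wrapped in `Arel.sql`.

```ruby
Book.upsert_all(
  [{ id: 200, author_id: 8, name: "Refactoring", status: 1 }],
  # Existing row values are addressed as target.[column_name],
  # incoming row values as source.[column_name].
  on_duplicate: Arel.sql("target.status = GREATEST(target.status, source.status)")
)
```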
First, ask on the Google list, IRC, or post a ticket on github issues. Second, make sure there are tests! We will not accept any patch that is not tested. Please read the `RUNNING_UNIT_TESTS` file for the details of how to run the unit tests. +Please contribute to the project by submitting bug fixes and features. To make sure your fix/feature has +a high chance of being added, please include tests in your pull request. To run the tests you will need to +set up your development environment. -* Github: http://github.com/rails-sqlserver/activerecord-sqlserver-adapter -* Google Group: http://groups.google.com/group/rails-sqlserver-adapter -* IRC Room: #rails-sqlserver on irc.freenode.net +## Setting Up Your Development Environment +To run the test suite you can use any of the methods below. See [RUNNING_UNIT_TESTS](RUNNING_UNIT_TESTS.md) for +more detailed information on running unit tests. -## Credits & Contributions +### Dev Container CLI -Many many people have contributed. If you do not see your name here and it should be let us know. Also, many thanks go out to those that have pledged financial contributions. +With [Docker](https://www.docker.com) and [npm](https://github.com/npm/cli) installed, you can run [Dev Container CLI](https://github.com/devcontainers/cli) to +utilize the [`.devcontainer`](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/tree/main/.devcontainer) configuration from the command line. +```bash +$ npm install -g @devcontainers/cli +$ cd activerecord-sqlserver-adapter +$ devcontainer up --workspace-folder . +$ devcontainer exec --workspace-folder . bash +``` -## Contributers -Up-to-date list of contributors: http://github.com/rails-sqlserver/activerecord-sqlserver-adapter/contributors +From within the container, you can run the tests using the following command: -* metaskills (Ken Collins) -* h-lame (Murray Steele) -* vegantech -* cjheath (Clifford Heath) -* fryguy (Jason Frey) -* jrafanie (Joe Rafaniello) -* nerdrew (Andrew Ryan) -* snowblink (Jonathan Lim) -* koppen (Jakob Skjerning) -* ebryn (Erik Bryn) -* adzap (Adam Meehan) -* neomindryan (Ryan Findley) -* jeremydurham (Jeremy Durham) +```bash +$ bundle install +$ bundle exec rake test +``` +_Note: The setup we use is based on the [Rails Dev Container setup](https://guides.rubyonrails.org/contributing_to_ruby_on_rails.html#using-dev-container-cli)._ -## Donators +### VirtualBox & Vagrant -I am trying to save up for a Happy Hacking pro keyboard. Help me out via GitTip! https://www.gittip.com/metaskills/ +The [activerecord-sqlserver-adapter-dev-box](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter-dev-box) +is a Vagrant/VirtualBox virtual machine that has MS SQL Server installed. However, the +activerecord-sqlserver-adapter-dev-box uses Vagrant and VirtualBox, which will not work on Macs with Apple silicon. +### Local Development -## License +See the [RUNNING_UNIT_TESTS](RUNNING_UNIT_TESTS.md) file for the details of how to run the unit tests locally. + +## Community + +There is a [Gitter channel](https://gitter.im/rails-sqlserver/activerecord-sqlserver-adapter) for the project where you are free to ask questions. + +## Credits & Contributions + +Many, many people have contributed. If you do not see your name here and it should be, let us know. Also, many thanks go out to those who have pledged financial contributions. -Copyright © 2008-2011. It is free software, and may be redistributed under the terms specified in the MIT-LICENSE file.
+You can see an up-to-date list of contributors here: http://github.com/rails-sqlserver/activerecord-sqlserver-adapter/contributors + +## License +ActiveRecord SQL Server Adapter is released under the [MIT License](https://opensource.org/licenses/MIT). diff --git a/RUNNING_UNIT_TESTS.md b/RUNNING_UNIT_TESTS.md index edba6e96a..094c16532 100644 --- a/RUNNING_UNIT_TESTS.md +++ b/RUNNING_UNIT_TESTS.md @@ -1,101 +1,120 @@ - -# How To Run The Test! - -This process is much easier than it has been before! - - -## TL;DR - -Default testing uses DBLIB with TinyTDS. - -* Setup two databases in SQL Server, [activerecord_unittest] and [activerecord_unittest2] -* Create a [rails] user with an empty password and give it a [db_owner] role to both DBs. Some tests require a server role of [sysadmin] too. - - http://twitpic.com/9bsiyp/full - - http://twitpic.com/9bsj7z/full - - http://twitpic.com/9bsjdx/full - - http://twitpic.com/9bsjl7/full -* $ git clone git://github.com/rails-sqlserver/activerecord-sqlserver-adapter.git -* $ bundle install -* $ bundle exec rake test ACTIVERECORD_UNITTEST_HOST='my.db.net' - - -## Creating the test databases - -The default names for the test databases are `activerecord_unittest` and `activerecord_unittest2`. If you want to use another database name then be sure to update the connection file that matches your connection method in test/connections/native_sqlserver_#{connection_method}/connection.rb. Define a user named 'rails' in SQL Server with all privileges granted for the test databases. Use an empty password for said user. - -The connection files make certain assumptions. For instance, the ODBC connection assumes you have a DSN setup that matches the name of the default database names. Remember too you have to set an environment variable for the DSN of the adapter, see the connection.rb file that matches your connection mode for details. - - -## Cloning The Repos - -Clone adapter git://github.com/rails-sqlserver/activerecord-sqlserver-adapter.git. The master branch is the one under development for Rails 3, track the repos 2-3-stable branch for 2.x development. - -The tests of this adapter depend on the existence of the Rails which under the 3.1 version and above is automatically cloned for you with bundler. However you can clone Rails from git://github.com/rails/rails.git and set the `RAILS_SOURCE` environment variable so bundler will use another local path instead. - -``` -$ git clone git://github.com/rails-sqlserver/activerecord-sqlserver-adapter.git -``` - -Optionally, you an just let bundler do all the work and assuming there is a git tag for the Rails version, you can set `RAILS_VERSION` before bundling. - -``` -$ export RAILS_VERSION='3.1.1' -$ bundle install -``` - - -## Configure DB Connection - -Please consult the `test/config.yml` file which is used to parse the configuration options for the DB connections when running tests. This file has overrides for any connection mode that you can set using simple environment variables. Assuming you are using FreeTDS 0.91 and above - -``` -$ export ACTIVERECORD_UNITTEST_HOST='my.db.net' # Defaults to localhost -$ export ACTIVERECORD_UNITTEST_PORT='1533' # Defaults to 1433 -``` - -If you have FreeTDS installed and/or want to use a named dataserver in your freetds.conf file - -``` -$ export ACTIVERECORD_UNITTEST_DATASERVER='mydbname' -``` - -These can be passed down to rake too. 
- -``` -$ bundle exec rake test ACTIVERECORD_UNITTEST_HOST='my.db.net' -``` - - -## Bundling - -Now with that out of the way you can run "bundle install" to hook everything up. Our tests use bundler to setup the load paths correctly. The default mode is DBLIB using TinyTDS. It is important to use bundle exec so we can wire up the ActiveRecord test libs correctly. - -``` -$ bundle exec rake test -``` - - -## Testing Options - -The Gemfile contains groups for `:tinytds` and `:odbc`. By default it will install both gems which allows you to run the full test suite in either connection mode. If for some reason any one of these is problematic or of no concern, you could always opt out of bundling either gem with something like this. - -``` -$ bundle install --without odbc -``` - -You can run different connection modes using the following rake commands. Again, the DBLIB connection mode using TinyTDS is the default test task. - -``` -$ bundle exec rake test:dblib -$ bundle exec rake test:odbc -``` - -By default, Bundler will download the Rails git repo and use the git tag that matches the dependency version in our gemspec. If you want to test another version of Rails, you can either temporarily change the :tag for Rails in the Gemfile. Likewise, you can clone the Rails repo your self to another directory and use the `RAILS_SOURCE` environment variable. - - -## Current Expected Failures - -* Misc Date/Time erros when using ODBC mode. -* Misc Date/Time erros when testing SQL Server 2005. - - +# How To Run The Tests Locally + +The following is a description of how to run the tests for the SQL Server adapter in a local environment. + +## MS SQL Server instance + +If you don't have easy access to MS SQL Server, you can set up a Vagrant/VirtualBox virtual machine with MS SQL Server. [Here's how](https://github.com/rails-sqlserver/activerecord-sqlserver-adapter-dev-box). + +## TL;DR + +Default testing uses DBLIB with TinyTDS. + +* Set up two databases in SQL Server, [activerecord_unittest] and [activerecord_unittest2] +* Create a [rails] user with an empty password and give it the [db_owner] role on both DBs. Some tests require a server role of [sysadmin] too. More details below with DDL SQL examples. +* `bundle install` +* `bundle exec rake test ACTIVERECORD_UNITTEST_HOST='my.db.net'` + +Focusing tests: use the `ONLY_` env vars to run either our cases or the ActiveRecord cases. Use the `TEST_FILES` env variants to focus on specific test file(s), separating multiple files with commas. Note that you have to use different env vars to focus on either our cases or a core ActiveRecord case. There may be failures when focusing on an ActiveRecord case since our coerced test files are not loaded in that scenario. + +```console +$ bundle exec rake test ONLY_SQLSERVER=1 +$ bundle exec rake test ONLY_ACTIVERECORD=1 + +$ bundle exec rake test TEST_FILES="test/cases/adapter_test_sqlserver.rb" +$ bundle exec rake test TEST_FILES_AR="test/cases/finder_test.rb" +``` + + +## Creating the test databases + +The default names for the test databases are `activerecord_unittest` and `activerecord_unittest2`. If you want to use another database name, then be sure to update the connection file that matches your connection method in test/connections/native_sqlserver_#{connection_method}/connection.rb. Define a user named 'rails' in SQL Server with all privileges granted for the test databases. Use an empty password for said user.
+ +```sql +CREATE DATABASE [activerecord_unittest]; +CREATE DATABASE [activerecord_unittest2]; +GO +CREATE LOGIN [rails] WITH PASSWORD = '', CHECK_POLICY = OFF, DEFAULT_DATABASE = [activerecord_unittest]; +GO +USE [activerecord_unittest]; +CREATE USER [rails] FOR LOGIN [rails]; +GO +EXEC sp_addrolemember N'db_owner', N'rails'; +EXEC master..sp_addsrvrolemember @loginame = N'rails', @rolename = N'sysadmin' +GO +``` + +## Cloning The Repos + +The tests of this adapter depend on the Rails source code, which is automatically cloned for you by Bundler. However, you can clone Rails from git://github.com/rails/rails.git yourself and set the `RAILS_SOURCE` environment variable so Bundler will use that local path instead. + +```console +$ git clone git://github.com/rails-sqlserver/activerecord-sqlserver-adapter.git +``` + +We suggest just letting Bundler do all the work. Assuming there is a git tag for the Rails version, you can set `RAILS_VERSION` before bundling. + +```console +$ export RAILS_VERSION='4.2.0' +$ bundle install +``` + + +## Configure DB Connection + +Please consult the `test/config.yml` file, which is used to parse the configuration options for the DB connections when running tests. This file has overrides for any connection mode that you can set using simple environment variables. Assuming you are using FreeTDS 0.91 and above: + +```console +$ export ACTIVERECORD_UNITTEST_HOST='my.db.net' # Defaults to localhost +$ export ACTIVERECORD_UNITTEST_PORT='1533' # Defaults to 1433 +``` + +If you have FreeTDS installed and/or want to use a named dataserver in your freetds.conf file: + +```console +$ export ACTIVERECORD_UNITTEST_DATASERVER='mydbname' +``` + +These can be passed down to rake too. + +```console +$ bundle exec rake test ACTIVERECORD_UNITTEST_HOST='my.db.net' +``` + + +## Bundling + +Now with that out of the way, you can run `bundle install` to hook everything up. Our tests use Bundler to set up the load paths correctly. The default mode is DBLIB using TinyTDS. It is important to use `bundle exec` so we can wire up the ActiveRecord test libs correctly. + +```console +$ bundle exec rake test +``` + + +## Testing Options + +By default, Bundler will download the Rails git repo and use the git tag that matches the dependency version in our gemspec. If you want to test another version of Rails, you can either temporarily change the `:tag` for Rails in the Gemfile or clone the Rails repo yourself to another directory and use the `RAILS_SOURCE` environment variable. + +```console +$ RAILS_SOURCE='/vagrant/rails' bundle exec rake test +``` + +If you want to use a specific seed for the tests, use the `TESTOPTS` env variable like: + +```console
+$ bundle exec rake test TESTOPTS="--seed=45250" +``` + +And to make the tests fail fast, use: + +```console +$ bundle exec rake test TESTOPTS="-f" +``` + +## Troubleshooting + +* Make sure your firewall is off or allows SQL Server traffic both ways, typically on port 1433. +* Ensure that you are running on a local admin login to create the Rails user. +* Possibly change the SQL Server TCP/IP properties in "SQL Server Configuration Manager -> SQL Server Network Configuration -> Protocols for MSSQLSERVER", and ensure that TCP/IP is enabled and the appropriate entries on the "IP Addresses" tab are enabled.
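If the suite still cannot reach the server after checking the items above, it can help to verify basic connectivity outside of Ruby first. The snippet below is a minimal sketch, assuming FreeTDS's `tsql` utility is installed and that the host, port, and empty-password `rails` login match the setup described above; adjust these values to your environment.

```console
$ # Verify the TDS handshake and login with FreeTDS before running the suite.
$ tsql -H my.db.net -p 1433 -U rails -P ''
1> SELECT name FROM sys.databases;
2> GO
1> exit
```

If `tsql` connects but `rake test` does not, double-check the `ACTIVERECORD_UNITTEST_HOST` and `ACTIVERECORD_UNITTEST_PORT` values you are exporting.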
+ + diff --git a/Rakefile b/Rakefile index 4c13a44c3..8e7c505e8 100644 --- a/Rakefile +++ b/Rakefile @@ -1,75 +1,37 @@ -require 'rake' -require 'rake/testtask' +# frozen_string_literal: true -# Notes for cross compile: -# $ gcla ; bundle install ; rake compile ; rake cross compile ; rake cross native gem - -def test_libs(mode='dblib') - ['lib', - 'test', - "#{File.join(Gem.loaded_specs['activerecord'].full_gem_path,'test')}"] -end - -def test_files - return ENV['TEST_FILES'].split(',').sort if ENV['TEST_FILES'] - files = Dir.glob("test/cases/**/*_test_sqlserver.rb").sort - ar_path = Gem.loaded_specs['activerecord'].full_gem_path - ar_cases = Dir.glob("#{ar_path}/test/cases/**/*_test.rb") - adapter_cases = Dir.glob("#{ar_path}/test/cases/adapters/**/*_test.rb") - files += (ar_cases-adapter_cases).sort - files -end - -task :test => ['test:dblib'] -task :default => [:test] +require "bundler/gem_tasks" +require "rake/testtask" +require_relative "test/support/paths_sqlserver" +require_relative "test/support/rake_helpers" +task test: ["test:dblib"] +task default: [:test] namespace :test do - - ['dblib','odbc'].each do |mode| - + ENV["ARCONN"] = "sqlserver" + + %w[dblib].each do |mode| Rake::TestTask.new(mode) do |t| - t.libs = test_libs(mode) + t.libs = ARTest::SQLServer.test_load_paths t.test_files = test_files - t.verbose = true + t.warning = !!ENV["WARNING"] + t.verbose = false end - end - - task 'dblib:env' do - ENV['ARCONN'] = 'dblib' - end - - task 'odbc:env' do - ENV['ARCONN'] = 'odbc' - end - end -task 'test:dblib' => 'test:dblib:env' -task 'test:odbc' => 'test:odbc:env' - - namespace :profile do - - ['dblib','odbc'].each do |mode| + ["dblib"].each do |mode| namespace mode.to_sym do - Dir.glob("test/profile/*_profile_case.rb").sort.each do |test_file| - - profile_case = File.basename(test_file).sub('_profile_case.rb','') - + profile_case = File.basename(test_file).sub("_profile_case.rb", "") Rake::TestTask.new(profile_case) do |t| - t.libs = test_libs(mode) + t.libs = ARTest::SQLServer.test_load_paths t.test_files = [test_file] t.verbose = true end - end - end end - end - - diff --git a/VERSION b/VERSION index d6bb32f36..69640086a 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -3.2.10 \ No newline at end of file +8.2.0.alpha diff --git a/activerecord-sqlserver-adapter.gemspec b/activerecord-sqlserver-adapter.gemspec index 463165cce..c28b3da1c 100644 --- a/activerecord-sqlserver-adapter.gemspec +++ b/activerecord-sqlserver-adapter.gemspec @@ -1,21 +1,31 @@ -# -*- encoding: utf-8 -*- -$:.push File.expand_path("../lib", __FILE__) +# frozen_string_literal: true -Gem::Specification.new do |s| - s.platform = Gem::Platform::RUBY - s.name = "activerecord-sqlserver-adapter" - s.version = File.read(File.expand_path("../VERSION",__FILE__)).strip - s.summary = "ActiveRecord SQL Server Adapter. For SQL Server 2005 And Higher." - s.description = "ActiveRecord SQL Server Adapter. For SQL Server 2005 And Higher." 
- - s.authors = ['Ken Collins', 'Murray Steele', 'Shawn Balestracci', 'Joe Rafaniello', 'Tom Ward'] - s.email = "ken@metaskills.net" - s.homepage = "http://github.com/rails-sqlserver/activerecord-sqlserver-adapter" - - s.files = Dir['CHANGELOG', 'MIT-LICENSE', 'README.rdoc', 'VERSION', 'lib/**/*' ] - s.require_path = 'lib' - s.rubyforge_project = 'activerecord-sqlserver-adapter' - - s.add_dependency('activerecord', '~> 3.2.0') -end +version = File.read(File.expand_path("VERSION", __dir__)).strip + +Gem::Specification.new do |spec| + spec.name = "activerecord-sqlserver-adapter" + spec.platform = Gem::Platform::RUBY + spec.version = version + + spec.required_ruby_version = ">= 3.2.0" + + spec.license = "MIT" + spec.authors = ["Ken Collins", "Anna Carey", "Will Bond", "Murray Steele", "Shawn Balestracci", "Joe Rafaniello", "Tom Ward", "Aidan Haran"] + spec.email = ["ken@metaskills.net", "will@wbond.net"] + spec.homepage = "http://github.com/rails-sqlserver/activerecord-sqlserver-adapter" + spec.summary = "ActiveRecord SQL Server Adapter." + spec.description = "ActiveRecord SQL Server Adapter. SQL Server 2012 and upward." + spec.metadata = { + "bug_tracker_uri" => "https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/issues", + "changelog_uri" => "https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/blob/v#{version}/CHANGELOG.md", + "source_code_uri" => "https://github.com/rails-sqlserver/activerecord-sqlserver-adapter/tree/v#{version}" + } + + spec.files = `git ls-files -z`.split("\x0") + spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } + spec.require_paths = ["lib"] + + spec.add_dependency "activerecord", "~> 8.2.0.alpha" + spec.add_dependency "tiny_tds", "~> 3.0" +end diff --git a/appveyor.yml b/appveyor.yml new file mode 100644 index 000000000..da7123a06 --- /dev/null +++ b/appveyor.yml @@ -0,0 +1,44 @@ +image: Visual Studio 2017 +skip_tags: true +clone_depth: 5 +build: off +matrix: + fast_finish: true + allow_failures: + - ruby_version: "27" + - ruby_version: "27-x64" + - ruby_version: "30" + - ruby_version: "30-x64" +services: + - mssql2014 + +init: + - SET PATH=C:\Ruby%ruby_version%\bin;%PATH% + - SET PATH=C:\MinGW\msys\1.0\bin;%PATH% + - SET RAKEOPT=-rdevkit + - SET TINYTDS_VERSION=2.1.3.pre + +install: + - ps: Update-AppveyorBuild -Version "$(Get-Content $env:appveyor_build_folder\VERSION).$env:appveyor_build_number" + - ruby --version + - gem --version + - bundle install + - gem uninstall bcrypt + - gem install bcrypt --platform=ruby + +test_script: + - powershell -File "%APPVEYOR_BUILD_FOLDER%\test\appveyor\dbsetup.ps1" + - timeout /t 4 /nobreak > NUL + - sqlcmd -S ".\SQL2014" -U sa -P Password12! -i %APPVEYOR_BUILD_FOLDER%\test\appveyor\dbsetup.sql + - bundle exec rake test ACTIVERECORD_UNITTEST_DATASERVER="localhost\SQL2014" + +environment: + CI_AZURE_HOST: + secure: VChrioaIWkf9iuuaSs4cryiA4honrADgZqNC0++begg= + CI_AZURE_PASS: + secure: cSQp8sk4urJYvq0utpsK+r7J+snJ2wpcdp8RdXJfB+w= + matrix: + - ruby_version: "27-x64" + - ruby_version: "27" + - ruby_version: "30-x64" + - ruby_version: "30" diff --git a/compose.ci.yaml b/compose.ci.yaml new file mode 100644 index 000000000..8242db9b8 --- /dev/null +++ b/compose.ci.yaml @@ -0,0 +1,20 @@ +services: + sqlserver: + image: ghcr.io/rails-sqlserver/mssql-server-linux-rails + ci: + environment: + - ACTIVERECORD_UNITTEST_HOST=sqlserver + - RAILS_BRANCH=main + build: + context: . 
+ dockerfile: Dockerfile.ci + command: wait-for sqlserver:1433 -- bundle exec rake test + depends_on: + - "sqlserver" + standardrb: + environment: + - RAILS_BRANCH=main + build: + context: . + dockerfile: Dockerfile.ci + command: bundle exec standardrb diff --git a/guides/RELEASING.md b/guides/RELEASING.md new file mode 100644 index 000000000..e41ef6c1a --- /dev/null +++ b/guides/RELEASING.md @@ -0,0 +1,11 @@ +# Releasing + +## Building locally + +If you want to build the gem to test it locally run `bundle exec rake build`. + +This command will build the gem in `pkg/activerecord-sqlserver-adapter-A.B.C.gem`, where `A.B.C` is the version in `VERSION` file. + +## Releasing to RubyGems + +Run `bundle exec rake release` to build the gem locally and push the `gem` file to RubyGems. diff --git a/lib/active_record/connection_adapters/sqlserver/core_ext/abstract_adapter.rb b/lib/active_record/connection_adapters/sqlserver/core_ext/abstract_adapter.rb new file mode 100644 index 000000000..90b1bead1 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/core_ext/abstract_adapter.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module CoreExt + module AbstractAdapter + def sqlserver? + false + end + end + end + end + end +end + +ActiveSupport.on_load(:active_record) do + mod = ActiveRecord::ConnectionAdapters::SQLServer::CoreExt::AbstractAdapter + ActiveRecord::ConnectionAdapters::AbstractAdapter.prepend(mod) +end diff --git a/lib/active_record/connection_adapters/sqlserver/core_ext/active_record.rb b/lib/active_record/connection_adapters/sqlserver/core_ext/active_record.rb index a264105f1..70b9aaee4 100644 --- a/lib/active_record/connection_adapters/sqlserver/core_ext/active_record.rb +++ b/lib/active_record/connection_adapters/sqlserver/core_ext/active_record.rb @@ -1,42 +1,27 @@ +# frozen_string_literal: true + module ActiveRecord module ConnectionAdapters - module Sqlserver + module SQLServer module CoreExt module ActiveRecord - extend ActiveSupport::Concern - - included do - class_attribute :coerced_sqlserver_date_columns, :coerced_sqlserver_time_columns - self.coerced_sqlserver_date_columns = Set.new - self.coerced_sqlserver_time_columns = Set.new - end module ClassMethods - def execute_procedure(proc_name, *variables) if connection.respond_to?(:execute_procedure) - connection.execute_procedure(proc_name,*variables) + connection.execute_procedure(proc_name, *variables) else [] end end - - def coerce_sqlserver_date(*attributes) - self.coerced_sqlserver_date_columns += attributes.map(&:to_s) - end - - def coerce_sqlserver_time(*attributes) - self.coerced_sqlserver_time_columns += attributes.map(&:to_s) - end - end - end end end end end - -ActiveRecord::Base.send :include, ActiveRecord::ConnectionAdapters::Sqlserver::CoreExt::ActiveRecord +ActiveSupport.on_load(:active_record) do + include ActiveRecord::ConnectionAdapters::SQLServer::CoreExt::ActiveRecord +end diff --git a/lib/active_record/connection_adapters/sqlserver/core_ext/attribute_methods.rb b/lib/active_record/connection_adapters/sqlserver/core_ext/attribute_methods.rb new file mode 100644 index 000000000..f65400a87 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/core_ext/attribute_methods.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +require "active_record/attribute_methods" + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module CoreExt + module AttributeMethods + private + + def 
attributes_for_update(attribute_names) + self.class.with_connection do |connection| + return super(attribute_names) unless connection.sqlserver? + + super(attribute_names).reject do |name| + column = self.class.columns_hash[name] + column&.respond_to?(:is_identity?) && column.is_identity? + end + end + end + end + end + end + end +end + +ActiveSupport.on_load(:active_record) do + include ActiveRecord::ConnectionAdapters::SQLServer::CoreExt::AttributeMethods +end diff --git a/lib/active_record/connection_adapters/sqlserver/core_ext/database_statements.rb b/lib/active_record/connection_adapters/sqlserver/core_ext/database_statements.rb deleted file mode 100644 index a6cf73fb1..000000000 --- a/lib/active_record/connection_adapters/sqlserver/core_ext/database_statements.rb +++ /dev/null @@ -1,97 +0,0 @@ -module ActiveRecord - module ConnectionAdapters - module Sqlserver - module CoreExt - module DatabaseStatements - - # This is a copy of the current (3.1.3) ActiveRecord's transaction method. We should propose - # a patch to the default transaction method to make it more callback for adapters that want to - # do deadlock retry logic. Because this is a copy, we really need to keep an eye out on this when - # upgradding the adapter. - def transaction_with_retry_deadlock_victim(options = {}) - options.assert_valid_keys :requires_new, :joinable - - last_transaction_joinable = defined?(@transaction_joinable) ? @transaction_joinable : nil - if options.has_key?(:joinable) - @transaction_joinable = options[:joinable] - else - @transaction_joinable = true - end - requires_new = options[:requires_new] || !last_transaction_joinable - - transaction_open = false - @_current_transaction_records ||= [] - - begin - if block_given? - if requires_new || open_transactions == 0 - if open_transactions == 0 - begin_db_transaction - elsif requires_new - create_savepoint - end - increment_open_transactions - transaction_open = true - @_current_transaction_records.push([]) - end - yield - end - rescue Exception => database_transaction_rollback - if transaction_open && !outside_transaction? - transaction_open = false - decrement_open_transactions - # handle deadlock victim retries at the outermost transaction - if open_transactions == 0 - if database_transaction_rollback.is_a?(::ActiveRecord::DeadlockVictim) - # SQL Server has already rolled back, so rollback activerecord's history - rollback_transaction_records(true) - retry - else - rollback_db_transaction - rollback_transaction_records(true) - end - else - rollback_to_savepoint - rollback_transaction_records(false) - end - end - raise unless database_transaction_rollback.is_a?(::ActiveRecord::Rollback) - end - ensure - @transaction_joinable = last_transaction_joinable - - if outside_transaction? - @open_transactions = 0 - elsif transaction_open - decrement_open_transactions - begin - if open_transactions == 0 - commit_db_transaction - commit_transaction_records - else - release_savepoint - save_point_records = @_current_transaction_records.pop - unless save_point_records.blank? - @_current_transaction_records.push([]) if @_current_transaction_records.empty? 
- @_current_transaction_records.last.concat(save_point_records) - end - end - rescue Exception => database_transaction_rollback - if open_transactions == 0 - rollback_db_transaction - rollback_transaction_records(true) - else - rollback_to_savepoint - rollback_transaction_records(false) - end - raise - end - end - end - - end - end - end - end -end - diff --git a/lib/active_record/connection_adapters/sqlserver/core_ext/explain.rb b/lib/active_record/connection_adapters/sqlserver/core_ext/explain.rb index 3d00e10ed..137e17c5d 100644 --- a/lib/active_record/connection_adapters/sqlserver/core_ext/explain.rb +++ b/lib/active_record/connection_adapters/sqlserver/core_ext/explain.rb @@ -1,41 +1,54 @@ +# frozen_string_literal: true + module ActiveRecord module ConnectionAdapters - module Sqlserver + module SQLServer module CoreExt module Explain - SQLSERVER_STATEMENT_PREFIX = "EXEC sp_executesql " - SQLSERVER_PARAM_MATCHER = /@\d+ =/ - - def exec_explain(queries) - unprepared_queries = queries.map { |sql, bind| [unprepare_sqlserver_statement(sql), bind] } - super(unprepared_queries) + SQLSERVER_STATEMENT_REGEXP = /N'(.+)', N'(.+)', (.+)/ + + def exec_explain(queries, options = []) + with_connection do |connection| + return super(queries, options) unless connection.sqlserver? + + unprepared_queries = queries.map do |(sql, binds)| + [unprepare_sqlserver_statement(sql, binds), binds] + end + + super(unprepared_queries, options) + end end - + private - - # This is somewhat hacky, but it should reliably reformat our prepared sql statment - # which uses sp_executesql to just the first argument, then unquote it. Likewise our - # do_exec_query method should substitude the @n args withe the quoted values. - def unprepare_sqlserver_statement(sql) - if sql.starts_with?(SQLSERVER_STATEMENT_PREFIX) - executesql = sql.from(SQLSERVER_STATEMENT_PREFIX.length) - executesql_args = executesql.split(', ') - found_args = executesql_args.reject! { |arg| arg =~ SQLSERVER_PARAM_MATCHER } - executesql_args.pop if found_args && executesql_args.many? - executesql = executesql_args.join(', ').strip.match(/N'(.*)'/m)[1] - Utils.unquote_string(executesql) - else - sql + + # This is somewhat hacky, but it should reliably reformat our prepared sql statement + # which uses sp_executesql to just the first argument, then unquote it. Likewise our + # `sp_executesql` method should substitute the @n args with the quoted values. 
+ def unprepare_sqlserver_statement(sql, binds) + return sql unless sql.start_with?(SQLSERVER_STATEMENT_PREFIX) + + executesql = sql.from(SQLSERVER_STATEMENT_PREFIX.length) + executesql = executesql.match(SQLSERVER_STATEMENT_REGEXP).to_a[1] + + binds.each_with_index do |bind, index| + value = if bind.is_a?(::ActiveModel::Attribute) + connection.quote(bind.value_for_database) + else + connection.quote(bind) + end + executesql = executesql.sub("@#{index}", value) end - end - + executesql + end end end end end end -ActiveRecord::Base.extend ActiveRecord::ConnectionAdapters::Sqlserver::CoreExt::Explain -ActiveRecord::Relation.send :include, ActiveRecord::ConnectionAdapters::Sqlserver::CoreExt::Explain +ActiveSupport.on_load(:active_record) do + extend ActiveRecord::ConnectionAdapters::SQLServer::CoreExt::Explain + ActiveRecord::Relation.include(ActiveRecord::ConnectionAdapters::SQLServer::CoreExt::Explain) +end diff --git a/lib/active_record/connection_adapters/sqlserver/core_ext/explain_subscriber.rb b/lib/active_record/connection_adapters/sqlserver/core_ext/explain_subscriber.rb new file mode 100644 index 000000000..80dd6a2cf --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/core_ext/explain_subscriber.rb @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +require "active_support/core_ext/kernel/reporting" + +ActiveSupport.on_load(:active_record) do + silence_warnings do + # Already defined in Rails + ActiveRecord::ExplainRegistry::Subscriber::EXPLAINED_SQLS = /(select|update|delete|insert)\b/i + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/core_ext/finder_methods.rb b/lib/active_record/connection_adapters/sqlserver/core_ext/finder_methods.rb new file mode 100644 index 000000000..ec508347b --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/core_ext/finder_methods.rb @@ -0,0 +1,56 @@ +# frozen_string_literal: true + +require "active_record/relation" +require "active_record/version" + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module CoreExt + module FinderMethods + private + + def construct_relation_for_exists(conditions) + model.with_connection do |connection| + if connection.sqlserver? + _construct_relation_for_exists(conditions) + else + super + end + end + end + + # Same as original except we order by values in distinct select if present. + def _construct_relation_for_exists(conditions) + conditions = sanitize_forbidden_attributes(conditions) + + relation = if distinct_value && offset_value + # Start of monkey-patch + if select_values.present? + order(*select_values).limit!(1) + else + except(:order).limit!(1) + end + # End of monkey-patch + else + except(:select, :distinct, :order)._select!(Arel.sql(::ActiveRecord::FinderMethods::ONE_AS_ONE, retryable: true)).limit!(1) + end + + case conditions + when Array, Hash + relation.where!(conditions) unless conditions.empty? 
+ else + relation.where!(primary_key => conditions) unless conditions == :none + end + + relation + end + end + end + end + end +end + +ActiveSupport.on_load(:active_record) do + ActiveRecord::Relation.include(ActiveRecord::ConnectionAdapters::SQLServer::CoreExt::FinderMethods) +end diff --git a/lib/active_record/connection_adapters/sqlserver/core_ext/odbc.rb b/lib/active_record/connection_adapters/sqlserver/core_ext/odbc.rb deleted file mode 100644 index c81cd8f6d..000000000 --- a/lib/active_record/connection_adapters/sqlserver/core_ext/odbc.rb +++ /dev/null @@ -1,38 +0,0 @@ -module ActiveRecord - module ConnectionAdapters - module Sqlserver - module CoreExt - module ODBC - - module Statement - - def finished? - begin - connected? - false - rescue ::ODBC::Error - true - end - end - - end - - module Database - - def run_block(*args) - yield sth = run(*args) - sth.drop - end - - end - - end - end - end - end -end - - -ODBC::Statement.send :include, ActiveRecord::ConnectionAdapters::Sqlserver::CoreExt::ODBC::Statement -ODBC::Database.send :include, ActiveRecord::ConnectionAdapters::Sqlserver::CoreExt::ODBC::Database - diff --git a/lib/active_record/connection_adapters/sqlserver/core_ext/preloader.rb b/lib/active_record/connection_adapters/sqlserver/core_ext/preloader.rb new file mode 100644 index 000000000..d7bc7b9e4 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/core_ext/preloader.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: true + +require "active_record/associations/preloader" + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module CoreExt + module LoaderQuery + def load_records_for_keys(keys, &block) + scope.with_connection do |connection| + return super unless connection.sqlserver? + + return [] if keys.empty? 
+ + if association_key_name.is_a?(Array) + query_constraints = Hash.new { |hsh, key| hsh[key] = Set.new } + + keys.each_with_object(query_constraints) do |values_set, constraints| + association_key_name.zip(values_set).each do |key_name, value| + constraints[key_name] << value + end + end + + scope.where(query_constraints).load(&block) + else + keys.each_slice(in_clause_length).flat_map do |slice| + scope.where(association_key_name => slice).load(&block).records + end + end + end + end + + def in_clause_length + 10_000 + end + end + end + end + end +end + +ActiveSupport.on_load(:active_record) do + mod = ActiveRecord::ConnectionAdapters::SQLServer::CoreExt::LoaderQuery + ActiveRecord::Associations::Preloader::Association::LoaderQuery.prepend(mod) +end diff --git a/lib/active_record/connection_adapters/sqlserver/core_ext/relation.rb b/lib/active_record/connection_adapters/sqlserver/core_ext/relation.rb deleted file mode 100644 index a84bf081a..000000000 --- a/lib/active_record/connection_adapters/sqlserver/core_ext/relation.rb +++ /dev/null @@ -1,19 +0,0 @@ -module ActiveRecord - module ConnectionAdapters - module Sqlserver - module CoreExt - module Relation - - private - - def tables_in_string(string) - super - ['__rnt'] - end - - end - end - end - end -end - -ActiveRecord::Relation.send :include, ActiveRecord::ConnectionAdapters::Sqlserver::CoreExt::Relation diff --git a/lib/active_record/connection_adapters/sqlserver/database_limits.rb b/lib/active_record/connection_adapters/sqlserver/database_limits.rb index 3a7f33858..5d8973ab0 100644 --- a/lib/active_record/connection_adapters/sqlserver/database_limits.rb +++ b/lib/active_record/connection_adapters/sqlserver/database_limits.rb @@ -1,16 +1,13 @@ +# frozen_string_literal: true + module ActiveRecord module ConnectionAdapters - module Sqlserver + module SQLServer module DatabaseLimits - def table_alias_length 128 end - def column_name_length - 128 - end - def table_name_length 128 end @@ -19,30 +16,18 @@ def index_name_length 128 end - def columns_per_table - 1024 - end - - def indexes_per_table - 999 - end - - def columns_per_multicolumn_index - 16 - end - - def in_clause_length - 65536 - end + private - def sql_query_length - 65536 * 4096 + # The max number of binds is 2100, but because sp_executesql takes + # the first 2 params as the query string and the list of types, + # we have only 2098 spaces left + def bind_params_length + 2_098 end - def joins_per_query - 256 + def insert_rows_length + 1_000 end - end end end diff --git a/lib/active_record/connection_adapters/sqlserver/database_statements.rb b/lib/active_record/connection_adapters/sqlserver/database_statements.rb index 6185e5f1d..daf635d63 100644 --- a/lib/active_record/connection_adapters/sqlserver/database_statements.rb +++ b/lib/active_record/connection_adapters/sqlserver/database_statements.rb @@ -1,449 +1,626 @@ +# frozen_string_literal: true + module ActiveRecord module ConnectionAdapters - module Sqlserver + module SQLServer module DatabaseStatements - - include CoreExt::DatabaseStatements - - def select_rows(sql, name = nil) - raw_select sql, name, [], :fetch => :rows + READ_QUERY = ActiveRecord::ConnectionAdapters::AbstractAdapter.build_read_query_regexp(:begin, :commit, :dbcc, :explain, :save, :select, :set, :rollback, :waitfor, :use) # :nodoc: + private_constant :READ_QUERY + + def write_query?(sql) # :nodoc: + !READ_QUERY.match?(sql) + rescue ArgumentError # Invalid encoding + !READ_QUERY.match?(sql.b) end - def execute(sql, name = nil) - if id_insert_table_name = 
query_requires_identity_insert?(sql) - with_identity_insert_enabled(id_insert_table_name) { do_execute(sql,name) } - else - do_execute(sql,name) + def perform_query(raw_connection, sql, binds, type_casted_binds, prepare:, notification_payload:, batch:) + unless binds.nil? || binds.empty? + types, params = sp_executesql_types_and_parameters(binds) + sql = sp_executesql_sql(sql, types, params, notification_payload[:name]) end - end - - def exec_query(sql, name = 'SQL', binds = [], sqlserver_options = {}) - if id_insert_table_name = sqlserver_options[:insert] ? query_requires_identity_insert?(sql) : nil - with_identity_insert_enabled(id_insert_table_name) { do_exec_query(sql, name, binds) } + + id_insert_table_name = query_requires_identity_insert?(sql) + + result, affected_rows = if id_insert_table_name + with_identity_insert_enabled(id_insert_table_name, raw_connection) do + internal_exec_sql_query(sql, raw_connection) + end else - do_exec_query(sql, name, binds) + internal_exec_sql_query(sql, raw_connection) end + + verified! + notification_payload[:affected_rows] = affected_rows + notification_payload[:row_count] = result.count + result end - - def exec_insert(sql, name, binds) - exec_query sql, name, binds, :insert => true + + # Method `perform_query` already returns an `ActiveRecord::Result` so we have nothing to cast here. This is + # different to the MySQL/PostgreSQL adapters where the raw result is converted to `ActiveRecord::Result` in + # `cast_result`. + def cast_result(result) + result end - - def exec_delete(sql, name, binds) - sql << "; SELECT @@ROWCOUNT AS AffectedRows" - super.rows.first.first + + # Returns the affected rows from results. + def affected_rows(raw_result) + column_name = lowercase_schema_reflection ? "affectedrows" : "AffectedRows" + raw_result&.first&.fetch(column_name, nil) end - def exec_update(sql, name, binds) - sql << "; SELECT @@ROWCOUNT AS AffectedRows" - super.rows.first.first + # Returns the affected rows from results or handle. + def affected_rows_from_results_or_handle(raw_result, handle) + affected_rows(raw_result) || handle.affected_rows end - def outside_transaction? - select_value('SELECT @@TRANCOUNT', 'SCHEMA') == 0 + def internal_exec_sql_query(sql, conn) + handle = internal_raw_execute(sql, conn) + results = handle_to_names_and_values(handle, ar_result: true) + + [results, affected_rows_from_results_or_handle(results, handle)] + ensure + finish_statement_handle(handle) end - - def supports_statement_cache? - true + + def exec_delete(sql, name = nil, binds = []) + sql = sql.dup << "; SELECT @@ROWCOUNT AS AffectedRows" + super end - def transaction(options = {}) - if retry_deadlock_victim? - block_given? ? transaction_with_retry_deadlock_victim(options) { yield } : transaction_with_retry_deadlock_victim(options) - else - block_given? ? 
super(options) { yield } : super(options) - end + def exec_update(sql, name = nil, binds = []) + sql = sql.dup << "; SELECT @@ROWCOUNT AS AffectedRows" + super end def begin_db_transaction - do_execute "BEGIN TRANSACTION" + internal_execute("BEGIN TRANSACTION", "TRANSACTION", allow_retry: true, materialize_transactions: false) + end + + def transaction_isolation_levels + super.merge snapshot: "SNAPSHOT" + end + + def begin_isolated_db_transaction(isolation) + set_transaction_isolation_level(transaction_isolation_levels.fetch(isolation)) + begin_db_transaction + end + + def set_transaction_isolation_level(isolation_level) + internal_execute("SET TRANSACTION ISOLATION LEVEL #{isolation_level}", "TRANSACTION", allow_retry: true, materialize_transactions: false) end def commit_db_transaction - disable_auto_reconnect { do_execute "COMMIT TRANSACTION" } + internal_execute("COMMIT TRANSACTION", "TRANSACTION", allow_retry: false, materialize_transactions: true) end - def rollback_db_transaction - do_execute "IF @@TRANCOUNT > 0 ROLLBACK TRANSACTION" + def exec_rollback_db_transaction + internal_execute("IF @@TRANCOUNT > 0 ROLLBACK TRANSACTION", "TRANSACTION", allow_retry: false, materialize_transactions: true) end - def create_savepoint - disable_auto_reconnect { do_execute "SAVE TRANSACTION #{current_savepoint_name}" } + def case_sensitive_comparison(attribute, value) + column = column_for_attribute(attribute) + + if column.collation && !column.case_sensitive? + attribute.eq(Arel::Nodes::Bin.new(value)) + else + super + end + end + + # We should propose this change to Rails team + def insert_fixtures_set(fixture_set, tables_to_delete = []) + fixture_inserts = [] + + fixture_set.each do |table_name, fixtures| + fixtures.each_slice(insert_rows_length) do |batch| + fixture_inserts << build_fixture_sql(batch, table_name) + end + end + + table_deletes = tables_to_delete.map { |table| "DELETE FROM #{quote_table_name table}" } + total_sqls = Array.wrap(table_deletes + fixture_inserts) + + disable_referential_integrity do + transaction(requires_new: true) do + total_sqls.each do |sql| + execute sql, "Fixtures Load" + yield if block_given? + end + end + end end - def release_savepoint + def can_perform_case_insensitive_comparison_for?(column) + column.type == :string && (!column.collation || column.case_sensitive?) end + private :can_perform_case_insensitive_comparison_for? - def rollback_to_savepoint - disable_auto_reconnect { do_execute "ROLLBACK TRANSACTION #{current_savepoint_name}" } + def default_insert_value(column) + if column.is_identity? + table_name = quote(quote_table_name(column.table_name)) + Arel.sql("IDENT_CURRENT(#{table_name}) + IDENT_INCR(#{table_name})") + else + super + end end + private :default_insert_value + + def build_insert_sql(insert) # :nodoc: + # Use regular insert if not skipping/updating duplicates. + return build_sql_for_regular_insert(insert:) unless insert.skip_duplicates? || insert.update_duplicates? + + insert_all = insert.send(:insert_all) + columns_with_uniqueness_constraints = get_columns_with_uniqueness_constraints(insert_all:, insert:) - def add_limit_offset!(sql, options) - raise NotImplementedError, 'This has been moved to the SQLServerCompiler in Arel.' + # If we do not have any columns that might have conflicting values just execute a regular insert, else use merge. + if columns_with_uniqueness_constraints.flatten.empty? 
+ build_sql_for_regular_insert(insert:) + else + build_sql_for_merge_insert(insert:, insert_all:, columns_with_uniqueness_constraints:) + end end - def empty_insert_statement_value - "DEFAULT VALUES" + def build_sql_for_merge_insert(insert:, insert_all:, columns_with_uniqueness_constraints:) # :nodoc: + insert_all.inserts.reverse! if insert.update_duplicates? + + sql = <<~SQL + MERGE INTO #{insert.model.quoted_table_name} WITH (UPDLOCK, HOLDLOCK) AS target + USING ( + SELECT * + FROM ( + SELECT #{insert.send(:columns_list)}, #{partition_by_columns_with_uniqueness_constraints(columns_with_uniqueness_constraints:)} + FROM (#{merge_insert_values_list(insert:, insert_all:)}) + AS t1 (#{insert.send(:columns_list)}) + ) AS ranked_source + WHERE #{is_first_record_across_all_uniqueness_constraints(columns_with_uniqueness_constraints:)} + ) AS source + ON (#{joining_on_columns_with_uniqueness_constraints(columns_with_uniqueness_constraints:)}) + SQL + + if insert.update_duplicates? + sql << " WHEN MATCHED THEN UPDATE SET " + + if insert.raw_update_sql? + sql << insert.raw_update_sql + else + if insert.record_timestamps? + sql << build_sql_for_recording_timestamps_when_updating(insert:) + end + + sql << insert.updatable_columns.map { |column| "target.#{quote_column_name(column)}=source.#{quote_column_name(column)}" }.join(",") + end + end + sql << " WHEN NOT MATCHED BY TARGET THEN" + sql << " INSERT (#{insert.send(:columns_list)}) VALUES (#{insert_all.keys_including_timestamps.map { |column| "source.#{quote_column_name(column)}" }.join(", ")})" + sql << build_sql_for_returning(insert:, insert_all: insert.send(:insert_all)) + sql << ";" + + sql end - def case_sensitive_modifier(node) - node.acts_like?(:string) ? Arel::Nodes::Bin.new(node) : node + # For `nil` identity columns we need to ensure that the values do not match so that they are all inserted. + # Method is a combination of `ActiveRecord::InsertAll#values_list` and `ActiveRecord::ConnectionAdapters::SQLServer::DatabaseStatements#default_insert_value`. + def merge_insert_values_list(insert:, insert_all:) + connection = insert.send(:connection) + identity_index = 0 + + types = insert.send(:extract_types_for, insert.keys_including_timestamps) + + values_list = insert_all.map_key_with_value do |key, value| + if Arel::Nodes::SqlLiteral === value + value + elsif insert.primary_keys.include?(key) && value.nil? + column = insert.model.columns_hash[key] + + if column.is_identity? + identity_index += 1 + table_name = quote(quote_table_name(column.table_name)) + Arel.sql("IDENT_CURRENT(#{table_name}) + (IDENT_INCR(#{table_name}) * #{identity_index})") + else + connection.default_insert_value(column) + end + else + ActiveModel::Type::SerializeCastValue.serialize(type = types[key], type.cast(value)) + end + end + + connection.visitor.compile(Arel::Nodes::ValuesList.new(values_list)) end - + # === SQLServer Specific ======================================== # - + def execute_procedure(proc_name, *variables) vars = if variables.any? 
&& variables.first.is_a?(Hash) - variables.first.map { |k,v| "@#{k} = #{quote(v)}" } - else - variables.map { |v| quote(v) } - end.join(', ') + variables.first.map { |k, v| "@#{k} = #{quote(v)}" } + else + variables.map { |v| quote(v) } + end.join(", ") sql = "EXEC #{proc_name} #{vars}".strip - name = 'Execute Procedure' - log(sql, name) do - case @connection_options[:mode] - when :dblib - result = @connection.execute(sql) - result.each(:as => :hash, :cache_rows => true) do |row| + + log(sql, "Execute Procedure") do |notification_payload| + with_raw_connection do |conn| + result = internal_raw_execute(sql, conn) + verified! + options = {as: :hash, cache_rows: true, timezone: ActiveRecord.default_timezone || :utc} + + result.each(options) do |row| r = row.with_indifferent_access yield(r) if block_given? end - result.each.map{ |row| row.is_a?(Hash) ? row.with_indifferent_access : row } - when :odbc - results = [] - raw_connection_run(sql) do |handle| - get_rows = lambda { - rows = handle_to_names_and_values handle, :fetch => :all - rows.each_with_index { |r,i| rows[i] = r.with_indifferent_access } - results << rows - } - get_rows.call - while handle_more_results?(handle) - get_rows.call - end - end - results.many? ? results : results.first + + result = result.each.map { |row| row.is_a?(Hash) ? row.with_indifferent_access : row } + notification_payload[:row_count] = result.count + result end end end - - def use_database(database=nil) + + def with_identity_insert_enabled(table_name, conn) + # If the table name is a view, we need to get the base table name for enabling identity insert. + table_name = view_table_name(table_name) if view_exists?(table_name) + quoted_table_name = quote_table_name(table_name) + + set_identity_insert(quoted_table_name, conn, true) + yield + ensure + set_identity_insert(quoted_table_name, conn, false) + end + + def use_database(database = nil) return if sqlserver_azure? - database ||= @connection_options[:database] - do_execute "USE #{quote_table_name(database)}" unless database.blank? + + name = SQLServer::Utils.extract_identifiers(database || @connection_parameters[:database]).quoted + execute("USE #{name}", "SCHEMA") unless name.blank? end - + def user_options return {} if sqlserver_azure? - select_rows("dbcc useroptions",'SCHEMA').inject(HashWithIndifferentAccess.new) do |values,row| - set_option = row[0].gsub(/\s+/,'_') - user_value = row[1] + + rows = select_rows("DBCC USEROPTIONS WITH NO_INFOMSGS", "SCHEMA") + rows = rows.first if rows.size == 2 && rows.last.empty? + rows.each_with_object(HashWithIndifferentAccess.new) do |row, values| + if row.instance_of? Hash + set_option = row.values[0].gsub(/\s+/, "_") + user_value = row.values[1] + elsif row.instance_of? Array + set_option = row[0].gsub(/\s+/, "_") + user_value = row[1] + end values[set_option] = user_value - values end end - + def user_options_dateformat if sqlserver_azure? - select_value 'SELECT [dateformat] FROM [sys].[syslanguages] WHERE [langid] = @@LANGID', 'SCHEMA' + select_value "SELECT [dateformat] FROM [sys].[syslanguages] WHERE [name] = @@LANGUAGE", "SCHEMA" else - user_options['dateformat'] + user_options["dateformat"] end end - + def user_options_isolation_level if sqlserver_azure? 
- sql = %|SELECT CASE [transaction_isolation_level] + sql = %(SELECT CASE [transaction_isolation_level] WHEN 0 THEN NULL - WHEN 1 THEN 'READ UNCOMITTED' - WHEN 2 THEN 'READ COMITTED' - WHEN 3 THEN 'REPEATABLE READ' - WHEN 4 THEN 'SERIALIZABLE' - WHEN 5 THEN 'SNAPSHOT' END AS [isolation_level] - FROM [sys].[dm_exec_sessions] - WHERE [session_id] = @@SPID|.squish - select_value sql, 'SCHEMA' + WHEN 1 THEN 'READ UNCOMMITTED' + WHEN 2 THEN 'READ COMMITTED' + WHEN 3 THEN 'REPEATABLE READ' + WHEN 4 THEN 'SERIALIZABLE' + WHEN 5 THEN 'SNAPSHOT' END AS [isolation_level] + FROM [sys].[dm_exec_sessions] + WHERE [session_id] = @@SPID).squish + select_value sql, "SCHEMA" else - user_options['isolation_level'] + user_options["isolation_level"] end end - + def user_options_language if sqlserver_azure? - select_value 'SELECT @@LANGUAGE AS [language]', 'SCHEMA' + select_value "SELECT @@LANGUAGE AS [language]", "SCHEMA" else - user_options['language'] + user_options["language"] end end - def run_with_isolation_level(isolation_level) - raise ArgumentError, "Invalid isolation level, #{isolation_level}. Supported levels include #{valid_isolation_levels.to_sentence}." if !valid_isolation_levels.include?(isolation_level.upcase) - initial_isolation_level = user_options_isolation_level || "READ COMMITTED" - do_execute "SET TRANSACTION ISOLATION LEVEL #{isolation_level}" - begin - yield - ensure - do_execute "SET TRANSACTION ISOLATION LEVEL #{initial_isolation_level}" - end if block_given? - end - def newid_function select_value "SELECT NEWID()" end - + def newsequentialid_function select_value "SELECT NEWSEQUENTIALID()" end - - def activity_stats - select_all %| - SELECT - [session_id] = s.session_id, - [user_process] = CONVERT(CHAR(1), s.is_user_process), - [login] = s.login_name, - [database] = ISNULL(db_name(r.database_id), N''), - [task_state] = ISNULL(t.task_state, N''), - [command] = ISNULL(r.command, N''), - [application] = ISNULL(s.program_name, N''), - [wait_time_ms] = ISNULL(w.wait_duration_ms, 0), - [wait_type] = ISNULL(w.wait_type, N''), - [wait_resource] = ISNULL(w.resource_description, N''), - [blocked_by] = ISNULL(CONVERT (varchar, w.blocking_session_id), ''), - [head_blocker] = - CASE - -- session has an active request, is blocked, but is blocking others - WHEN r2.session_id IS NOT NULL AND r.blocking_session_id = 0 THEN '1' - -- session is idle but has an open tran and is blocking others - WHEN r.session_id IS NULL THEN '1' - ELSE '' - END, - [total_cpu_ms] = s.cpu_time, - [total_physical_io_mb] = (s.reads + s.writes) * 8 / 1024, - [memory_use_kb] = s.memory_usage * 8192 / 1024, - [open_transactions] = ISNULL(r.open_transaction_count,0), - [login_time] = s.login_time, - [last_request_start_time] = s.last_request_start_time, - [host_name] = ISNULL(s.host_name, N''), - [net_address] = ISNULL(c.client_net_address, N''), - [execution_context_id] = ISNULL(t.exec_context_id, 0), - [request_id] = ISNULL(r.request_id, 0), - [workload_group] = N'' - FROM sys.dm_exec_sessions s LEFT OUTER JOIN sys.dm_exec_connections c ON (s.session_id = c.session_id) - LEFT OUTER JOIN sys.dm_exec_requests r ON (s.session_id = r.session_id) - LEFT OUTER JOIN sys.dm_os_tasks t ON (r.session_id = t.session_id AND r.request_id = t.request_id) - LEFT OUTER JOIN - (SELECT *, ROW_NUMBER() OVER (PARTITION BY waiting_task_address ORDER BY wait_duration_ms DESC) AS row_num - FROM sys.dm_os_waiting_tasks - ) w ON (t.task_address = w.waiting_task_address) AND w.row_num = 1 - LEFT OUTER JOIN sys.dm_exec_requests r2 ON (r.session_id = 
r2.blocking_session_id) - WHERE db_name(r.database_id) = '#{current_database}' - ORDER BY s.session_id| - end - - # === SQLServer Specific (Rake/Test Helpers) ==================== # - - def recreate_database - remove_database_connections_and_rollback do - do_execute "EXEC sp_MSforeachtable 'DROP TABLE ?'" + + protected + + def sql_for_insert(sql, pk, binds, returning) + if pk.nil? + table_name = query_requires_identity_insert?(sql) + pk = primary_key(table_name) end - end - def recreate_database!(database=nil) - current_db = current_database - database ||= current_db - this_db = database.to_s == current_db - do_execute 'USE master' if this_db - drop_database(database) - create_database(database) - ensure - use_database(current_db) if this_db - end - - def drop_database(database) - retry_count = 0 - max_retries = 1 - begin - do_execute "DROP DATABASE #{quote_table_name(database)}" - rescue ActiveRecord::StatementInvalid => err - if err.message =~ /because it is currently in use/i - raise if retry_count >= max_retries - retry_count += 1 - remove_database_connections_and_rollback(database) - retry - elsif err.message =~ /does not exist/i - nil + sql = if pk && use_output_inserted? && !database_prefix_remote_server? + table_name ||= get_table_name(sql) + exclude_output_inserted = exclude_output_inserted_table_name?(table_name, sql) + + if exclude_output_inserted + pk_and_types = Array(pk).map do |subkey| + { + quoted: SQLServer::Utils.extract_identifiers(subkey).quoted, + id_sql_type: exclude_output_inserted_id_sql_type(subkey, exclude_output_inserted) + } + end + + <<~SQL.squish + DECLARE @ssaIdInsertTable table (#{pk_and_types.map { |pk_and_type| "#{pk_and_type[:quoted]} #{pk_and_type[:id_sql_type]}" }.join(", ")}); + #{sql.dup.insert sql.index(/ (DEFAULT )?VALUES/i), " OUTPUT #{pk_and_types.map { |pk_and_type| "INSERTED.#{pk_and_type[:quoted]}" }.join(", ")} INTO @ssaIdInsertTable"} + SELECT #{pk_and_types.map { |pk_and_type| "CAST(#{pk_and_type[:quoted]} AS #{pk_and_type[:id_sql_type]}) #{pk_and_type[:quoted]}" }.join(", ")} FROM @ssaIdInsertTable + SQL else - raise + returning_columns = returning || Array(pk) + + if returning_columns.any? + returning_columns_statements = returning_columns.map { |c| " INSERTED.#{SQLServer::Utils.extract_identifiers(c).quoted}" } + sql.dup.insert sql.index(/ (DEFAULT )?VALUES/i), " OUTPUT" + returning_columns_statements.join(",") + else + sql + end end + else + "#{sql}; SELECT CAST(SCOPE_IDENTITY() AS bigint) AS Ident" end - end - - def create_database(database) - do_execute "CREATE DATABASE #{quote_table_name(database)}" - end - def current_database - select_value 'SELECT DB_NAME()' - end - - def charset - select_value "SELECT SERVERPROPERTY('SqlCharSetName')" - end - - - protected - - def select(sql, name = nil, binds = []) - exec_query(sql, name, binds).to_a - end - - def sql_for_insert(sql, pk, id_value, sequence_name, binds) - sql = "#{sql}; SELECT CAST(SCOPE_IDENTITY() AS bigint) AS Ident"# unless binds.empty? - super + [sql, binds] end - def last_inserted_id(result) - super || select_value("SELECT CAST(SCOPE_IDENTITY() AS bigint) AS Ident") - end - # === SQLServer Specific ======================================== # - - def valid_isolation_levels - ["READ COMMITTED", "READ UNCOMMITTED", "REPEATABLE READ", "SERIALIZABLE", "SNAPSHOT"] + + def set_identity_insert(table_name, conn, enable) + internal_raw_execute("SET IDENTITY_INSERT #{table_name} #{enable ? 
"ON" : "OFF"}", conn, perform_do: true) + rescue + raise ActiveRecordError, "IDENTITY_INSERT could not be turned #{enable ? "ON" : "OFF"} for table #{table_name}" end - + # === SQLServer Specific (Executing) ============================ # - def do_execute(sql, name = 'SQL') - log(sql, name) do - with_sqlserver_error_handling { raw_connection_do(sql) } + def sp_executesql_types_and_parameters(binds) + types, params = [], [] + binds.each_with_index do |attr, index| + attr = attr.value if attr.is_a?(Arel::Nodes::BindParam) + + types << "@#{index} #{sp_executesql_sql_type(attr)}" + params << sp_executesql_sql_param(attr) end + [types, params] end - - def do_exec_query(sql, name, binds) - explaining = name == 'EXPLAIN' - names_and_types = [] - params = [] - binds.each_with_index do |(column,value),index| - ar_column = column.is_a?(ActiveRecord::ConnectionAdapters::Column) - next if ar_column && column.sql_type == 'timestamp' - v = value - names_and_types << if ar_column - v = value.to_i if column.is_integer? && value.present? - "@#{index} #{column.sql_type_for_statement}" - elsif column.acts_like?(:string) - "@#{index} nvarchar(max)" - elsif column.is_a?(Fixnum) - v = value.to_i - "@#{index} int" - else - raise "Unknown bind columns. We can account for this." - end - quoted_value = ar_column ? quote(v,column) : quote(v,nil) - params << (explaining ? quoted_value : "@#{index} = #{quoted_value}") + + def sp_executesql_sql_type(attr) + if attr.respond_to?(:type) + type = attr.type.is_a?(ActiveModel::Attributes::Normalization::NormalizedValueType) ? attr.type.cast_type : attr.type + type = type.subtype if type.serialized? + + return type.sqlserver_type if type.respond_to?(:sqlserver_type) + + if type.is_a?(ActiveRecord::Encryption::EncryptedAttributeType) && type.instance_variable_get(:@cast_type).respond_to?(:sqlserver_type) + return type.instance_variable_get(:@cast_type).sqlserver_type + end + end + + value = active_model_attribute?(attr) ? attr.value_for_database : attr + + if value.is_a?(Numeric) + (value > 2_147_483_647) ? "bigint" : "int" + else + "nvarchar(max)" end - if explaining - params.each_with_index do |param, index| + end + + def sp_executesql_sql_param(attr) + return quote(attr) unless active_model_attribute?(attr) + + case value = attr.value_for_database + when Type::Binary::Data, ActiveRecord::Type::SQLServer::Data + quote(value) + else + quote(type_cast(value)) + end + end + + def active_model_attribute?(type) + type.is_a?(::ActiveModel::Attribute) + end + + def sp_executesql_sql(sql, types, params, name) + if name == "EXPLAIN" + params.each.with_index do |param, index| substitute_at_finder = /(@#{index})(?=(?:[^']|'[^']*')*$)/ # Finds unquoted @n values. - sql.sub! substitute_at_finder, param + sql = sql.sub substitute_at_finder, param.to_s end else + types = quote(types.join(", ")) + params = params.map.with_index { |p, i| "@#{i} = #{p}" }.join(", ") # Only p is needed, but with @i helps explain regexp. sql = "EXEC sp_executesql #{quote(sql)}" - sql << ", #{quote(names_and_types.join(', '))}, #{params.join(', ')}" unless binds.empty? + sql += ", #{types}, #{params}" unless params.empty? 
end - raw_select sql, name, binds, :ar_result => true - end - - def raw_connection_do(sql) - case @connection_options[:mode] - when :dblib - @connection.execute(sql).do - when :odbc - @connection.do(sql) - end - ensure - @update_sql = false + + sql.freeze end - - # === SQLServer Specific (Selecting) ============================ # - def raw_select(sql, name='SQL', binds=[], options={}) - log(sql,name,binds) { _raw_select(sql, options) } + # === SQLServer Specific (Identity Inserts) ===================== # + + def use_output_inserted? + self.class.use_output_inserted end - - def _raw_select(sql, options={}) - begin - handle = raw_connection_run(sql) - handle_to_names_and_values(handle, options) - ensure - finish_statement_handle(handle) - end + + def exclude_output_inserted_table_names? + !self.class.exclude_output_inserted_table_names.empty? end - - def raw_connection_run(sql) - with_sqlserver_error_handling do - case @connection_options[:mode] - when :dblib - @connection.execute(sql) - when :odbc - block_given? ? @connection.run_block(sql) { |handle| yield(handle) } : @connection.run(sql) - end - end + + def exclude_output_inserted_table_name?(table_name, sql) + return false unless exclude_output_inserted_table_names? + + table_name ||= get_table_name(sql) + return false unless table_name + + self.class.exclude_output_inserted_table_names[table_name] end - - def handle_more_results?(handle) - case @connection_options[:mode] - when :dblib - when :odbc - handle.more_results - end + + def exclude_output_inserted_id_sql_type(pk, exclude_output_inserted) + return "bigint" if exclude_output_inserted.is_a?(TrueClass) + return exclude_output_inserted[pk.to_sym] if exclude_output_inserted.is_a?(Hash) + exclude_output_inserted end - - def handle_to_names_and_values(handle, options={}) - case @connection_options[:mode] - when :dblib - handle_to_names_and_values_dblib(handle, options) - when :odbc - handle_to_names_and_values_odbc(handle, options) + + def query_requires_identity_insert?(sql) + return false unless insert_sql?(sql) + + raw_table_name = get_raw_table_name(sql) + id_column = identity_columns(raw_table_name).first + + if id_column && ( + sql =~ /^\s*(INSERT|EXEC sp_executesql N'INSERT)[^(]+\([^)]*\b(#{id_column.name})\b,?[^)]*\)/i || + sql =~ /^\s*MERGE INTO.+THEN INSERT \([^)]*\b(#{id_column.name})\b,?[^)]*\)/im + ) + SQLServer::Utils.extract_identifiers(raw_table_name).quoted + else + false end end - - def handle_to_names_and_values_dblib(handle, options={}) + + def insert_sql?(sql) + !(sql =~ /\A\s*(INSERT|EXEC sp_executesql N'INSERT|MERGE INTO.+THEN INSERT)/im).nil? + end + + def identity_columns(table_name) + schema_cache.columns(table_name).select(&:is_identity?) + end + + # === SQLServer Specific (Selecting) ============================ # + + def _raw_select(sql, conn) + handle = internal_raw_execute(sql, conn) + handle_to_names_and_values(handle, fetch: :rows) + ensure + finish_statement_handle(handle) + end + + def handle_to_names_and_values(handle, options = {}) query_options = {}.tap do |qo| - qo[:timezone] = ActiveRecord::Base.default_timezone || :utc + qo[:timezone] = ActiveRecord.default_timezone || :utc qo[:as] = (options[:ar_result] || options[:fetch] == :rows) ? :array : :hash end results = handle.each(query_options) - columns = lowercase_schema_reflection ? handle.fields.map { |c| c.downcase } : handle.fields - options[:ar_result] ? 
ActiveRecord::Result.new(columns, results) : results - end - - def handle_to_names_and_values_odbc(handle, options={}) - @connection.use_utc = ActiveRecord::Base.default_timezone == :utc + if options[:ar_result] - columns = lowercase_schema_reflection ? handle.columns(true).map { |c| c.name.downcase } : handle.columns(true).map { |c| c.name } - rows = handle.fetch_all || [] - ActiveRecord::Result.new(columns, rows) + columns = handle.fields + columns = columns.last if columns.any? && columns.all? { |e| e.is_a?(Array) } # If query returns multiple result sets, only return the columns of the last one. + columns = columns.map(&:downcase) if lowercase_schema_reflection + + ActiveRecord::Result.new(columns, results, affected_rows: handle.affected_rows) else - case options[:fetch] - when :all - handle.each_hash || [] - when :rows - handle.fetch_all || [] - end + results end end - + def finish_statement_handle(handle) - case @connection_options[:mode] - when :dblib - handle.cancel if handle - when :odbc - handle.drop if handle && handle.respond_to?(:drop) && !handle.finished? - end + handle&.cancel handle end - + + def internal_raw_execute(sql, raw_connection, perform_do: false) + result = raw_connection.execute(sql) + perform_do ? result.do : result + end + + # === SQLServer Specific (insert_all / upsert_all support) ===================== # + def build_sql_for_returning(insert:, insert_all:) + return "" unless insert_all.returning + + returning_values_sql = if insert_all.returning.is_a?(String) + insert_all.returning + else + Array(insert_all.returning).map do |attribute| + if insert.model.attribute_alias?(attribute) + "INSERTED.#{quote_column_name(insert.model.attribute_alias(attribute))} AS #{quote_column_name(attribute)}" + else + "INSERTED.#{quote_column_name(attribute)}" + end + end.join(",") + end + + " OUTPUT #{returning_values_sql}" + end + private :build_sql_for_returning + + def get_columns_with_uniqueness_constraints(insert_all:, insert:) + if (unique_by = insert_all.unique_by) + [unique_by.columns] + else + # Compare against every unique constraint (primary key included). + # Discard constraints that are not fully included on insert.keys. Prevents invalid queries. + # Example: ignore unique index for columns ["name"] if insert keys is ["description"] + (insert_all.send(:unique_indexes).map(&:columns) + [insert_all.primary_keys]).select do |columns| + columns.to_set.subset?(insert.keys) + end + end + end + private :get_columns_with_uniqueness_constraints + + def build_sql_for_regular_insert(insert:) + sql = "INSERT #{insert.into}" + sql << build_sql_for_returning(insert:, insert_all: insert.send(:insert_all)) + sql << " #{insert.values_list}" + sql + end + private :build_sql_for_regular_insert + + # why is the "PARTITION BY" clause needed? 
+ # in every DBMS system, insert_all / upsert_all is usually implemented with INSERT, that allows to define what happens + # when duplicates are found (SKIP OR UPDATE) + # by default rows are considered to be unique by every unique index on the table + # but since we have to use MERGE in MSSQL, which in return is a JOIN, we have to perform the "de-duplication" ourselves + # otherwise the "JOIN" clause would complain about non-unique values and being unable to JOIN the two tables + # this works easiest by using PARTITION and make sure that any record + # we are trying to insert is "the first one seen across all the potential columns with uniqueness constraints" + def partition_by_columns_with_uniqueness_constraints(columns_with_uniqueness_constraints:) + columns_with_uniqueness_constraints.map.with_index do |group_of_columns_with_uniqueness_constraints, index| + <<~PARTITION_BY + ROW_NUMBER() OVER ( + PARTITION BY #{group_of_columns_with_uniqueness_constraints.map { |column| quote_column_name(column) }.join(",")} + ORDER BY #{group_of_columns_with_uniqueness_constraints.map { |column| "#{quote_column_name(column)} DESC" }.join(",")} + ) AS rn_#{index} + PARTITION_BY + end.join(", ") + end + private :partition_by_columns_with_uniqueness_constraints + + def is_first_record_across_all_uniqueness_constraints(columns_with_uniqueness_constraints:) + columns_with_uniqueness_constraints.map.with_index do |group_of_columns_with_uniqueness_constraints, index| + "rn_#{index} = 1" + end.join(" AND ") + end + private :is_first_record_across_all_uniqueness_constraints + + def joining_on_columns_with_uniqueness_constraints(columns_with_uniqueness_constraints:) + columns_with_uniqueness_constraints.map do |columns| + columns.map do |column| + "target.#{quote_column_name(column)} = source.#{quote_column_name(column)}" + end.join(" AND ") + end.join(") OR (") + end + private :joining_on_columns_with_uniqueness_constraints + + # normally, generating the CASE SQL is done entirely by Rails + # and you would just hook into "touch_model_timestamps_unless" to add your database-specific instructions + # however, since we need to have "target." 
for the assignment, we also generate the CASE switch ourselves + def build_sql_for_recording_timestamps_when_updating(insert:) + insert.model.timestamp_attributes_for_update_in_model.filter_map do |column_name| + if insert.send(:touch_timestamp_attribute?, column_name) + "target.#{quote_column_name(column_name)}=CASE WHEN (#{insert.updatable_columns.map { |column| "(source.#{quote_column_name(column)} = target.#{quote_column_name(column)} OR (source.#{quote_column_name(column)} IS NULL AND target.#{quote_column_name(column)} IS NULL))" }.join(" AND ")}) THEN target.#{quote_column_name(column_name)} ELSE #{high_precision_current_timestamp} END," + end + end.join + end + private :build_sql_for_recording_timestamps_when_updating end end end diff --git a/lib/active_record/connection_adapters/sqlserver/database_tasks.rb b/lib/active_record/connection_adapters/sqlserver/database_tasks.rb new file mode 100644 index 000000000..28bc15da7 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/database_tasks.rb @@ -0,0 +1,64 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module DatabaseTasks + def create_database(database, options = {}) + name = SQLServer::Utils.extract_identifiers(database) + db_options = create_database_options(options) + edition_options = create_database_edition_options(options) + execute "CREATE DATABASE #{name} #{db_options} #{edition_options}" + end + + def drop_database(database) + name = SQLServer::Utils.extract_identifiers(database) + execute "ALTER DATABASE #{name} SET SINGLE_USER WITH ROLLBACK IMMEDIATE" + execute "DROP DATABASE #{name}" + end + + def current_database + select_value "SELECT DB_NAME()" + end + + def charset + select_value "SELECT DATABASEPROPERTYEX(DB_NAME(), 'SqlCharSetName')" + end + + def collation + @collation ||= select_value "SELECT DATABASEPROPERTYEX(DB_NAME(), 'Collation')" + end + + private + + def create_database_options(options = {}) + keys = [:collate] + copts = @connection_parameters + { + collate: copts[:collation] + }.merge(options.symbolize_keys).select { |_, v| + v.present? + }.slice(*keys).map { |k, v| + "#{k.to_s.upcase} #{v}" + }.join(" ") + end + + def create_database_edition_options(options = {}) + keys = [:maxsize, :edition, :service_objective] + copts = @connection_parameters + edition_options = { + maxsize: copts[:azure_maxsize], + edition: copts[:azure_edition], + service_objective: copts[:azure_service_objective] + }.merge(options.symbolize_keys).select { |_, v| + v.present? + }.slice(*keys).map { |k, v| + "#{k.to_s.upcase} = #{v}" + }.join(", ") + edition_options = "( #{edition_options} )" if edition_options.present? 
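+          # As a rough illustration (values hypothetical), Azure connection parameters such as
+          #   azure_maxsize: "2GB", azure_edition: "'Standard'", azure_service_objective: "'S0'"
+          # pass through here verbatim and produce a clause along the lines of
+          #   ( MAXSIZE = 2GB, EDITION = 'Standard', SERVICE_OBJECTIVE = 'S0' )
+          # which create_database appends to the generated CREATE DATABASE statement.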
+ edition_options + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/errors.rb b/lib/active_record/connection_adapters/sqlserver/errors.rb index 9de1f8e19..5d0e358f7 100644 --- a/lib/active_record/connection_adapters/sqlserver/errors.rb +++ b/lib/active_record/connection_adapters/sqlserver/errors.rb @@ -1,36 +1,6 @@ +# frozen_string_literal: true + module ActiveRecord - - class LostConnection < WrappedDatabaseException - end - class DeadlockVictim < WrappedDatabaseException end - - module ConnectionAdapters - module Sqlserver - module Errors - - LOST_CONNECTION_EXCEPTIONS = { - :dblib => ['TinyTds::Error'], - :odbc => ['ODBC::Error'] - }.freeze - - LOST_CONNECTION_MESSAGES = { - :dblib => [/closed connection/, /dead or not enabled/, /server failed/i], - :odbc => [/link failure/, /server failed/, /connection was already closed/, /invalid handle/i] - }.freeze - - - def lost_connection_exceptions - exceptions = LOST_CONNECTION_EXCEPTIONS[@connection_options[:mode]] - @lost_connection_exceptions ||= exceptions ? exceptions.map{ |e| e.constantize rescue nil }.compact : [] - end - - def lost_connection_messages - LOST_CONNECTION_MESSAGES[@connection_options[:mode]] - end - - end - end - end end diff --git a/lib/active_record/connection_adapters/sqlserver/quoting.rb b/lib/active_record/connection_adapters/sqlserver/quoting.rb index 02776b81e..5f609aaa0 100644 --- a/lib/active_record/connection_adapters/sqlserver/quoting.rb +++ b/lib/active_record/connection_adapters/sqlserver/quoting.rb @@ -1,112 +1,138 @@ +# frozen_string_literal: true + module ActiveRecord module ConnectionAdapters - module Sqlserver + module SQLServer module Quoting - - QUOTED_TRUE, QUOTED_FALSE = '1', '0' - QUOTED_STRING_PREFIX = 'N' - - def quote(value, column = nil) - case value - when String, ActiveSupport::Multibyte::Chars - if column && column.type == :integer && value.blank? - value.to_i.to_s - elsif column && column.type == :binary - column.class.string_to_binary(value) - elsif value.is_utf8? || (column && column.type == :string) - "#{quoted_string_prefix}'#{quote_string(value)}'" - else - super - end - when Date, Time - if column && column.sql_type == 'datetime' - "'#{quoted_datetime(value)}'" - elsif column && (column.sql_type == 'datetimeoffset' || column.sql_type == 'time') - "'#{quoted_full_iso8601(value)}'" - else - super - end - when nil - column.respond_to?(:sql_type) && column.sql_type == 'timestamp' ? 'DEFAULT' : super - else - super + extend ActiveSupport::Concern + + QUOTED_COLUMN_NAMES = Concurrent::Map.new # :nodoc: + QUOTED_TABLE_NAMES = Concurrent::Map.new # :nodoc: + + module ClassMethods + def column_name_matcher + / + \A + ( + (?: + # [database_name].[database_owner].[table_name].[column_name] | function(one or no argument) + ((?:\w+\.|\[\w+\]\.)?(?:\w+\.|\[\w+\]\.)?(?:\w+\.|\[\w+\]\.)?(?:\w+|\[\w+\]) | \w+\((?:|\g<2>)\)) + ) + (?:\s+AS\s+(?:\w+|\[\w+\]))? + ) + (?:\s*,\s*\g<1>)* + \z + /ix + end + + def column_name_with_order_matcher + / + \A + ( + (?: + # [database_name].[database_owner].[table_name].[column_name] | function(one or no argument) + ((?:\w+\.|\[\w+\]\.)?(?:\w+\.|\[\w+\]\.)?(?:\w+\.|\[\w+\]\.)?(?:\w+|\[\w+\]) | \w+\((?:|\g<2>)\)) + ) + (?:\s+COLLATE\s+\w+)? + (?:\s+ASC|\s+DESC)? + (?:\s+NULLS\s+(?:FIRST|LAST))? 
+ ) + (?:\s*,\s*\g<1>)* + \z + /ix + end + + def quote_column_name(name) + QUOTED_COLUMN_NAMES[name] ||= SQLServer::Utils.extract_identifiers(name).quoted + end + + def quote_table_name(name) + QUOTED_TABLE_NAMES[name] ||= SQLServer::Utils.extract_identifiers(name).quoted end end - - def quoted_string_prefix - QUOTED_STRING_PREFIX + + def fetch_type_metadata(sql_type, sqlserver_options = {}) + cast_type = lookup_cast_type(sql_type) + + simple_type = SqlTypeMetadata.new( + sql_type: sql_type, + type: cast_type.type, + limit: cast_type.limit, + precision: cast_type.precision, + scale: cast_type.scale + ) + + SQLServer::TypeMetadata.new(simple_type, **sqlserver_options) end - - def quote_string(string) - string.to_s.gsub(/\'/, "''") + + def quote_string(s) + SQLServer::Utils.quote_string(s) end - def quote_column_name(name) - schema_cache.quote_name(name) + def quote_string_single(s) + SQLServer::Utils.quote_string_single(s) end - def quote_table_name(name) - quote_column_name(name) + def quote_string_single_national(s) + SQLServer::Utils.quote_string_single_national(s) end - - def substitute_at(column, index) - if column.respond_to?(:sql_type) && column.sql_type == 'timestamp' - nil + + def quote_default_expression(value, column) + cast_type = lookup_cast_type(column.sql_type) + if cast_type.type == :uuid && value.is_a?(String) && value.include?("()") + value else - Arel.sql "@#{index}" + super end end def quoted_true - QUOTED_TRUE + "1" + end + + def unquoted_true + 1 end def quoted_false - QUOTED_FALSE + "0" end - def quoted_datetime(value) + def unquoted_false + 0 + end + + def quoted_date(value) if value.acts_like?(:time) - time_zone_qualified_value = quoted_value_acts_like_time_filter(value) - if value.is_a?(Date) - time_zone_qualified_value.to_time.xmlschema.to(18) - else - # CHANGED [Ruby 1.8] Not needed when 1.8 is dropped. - if value.is_a?(ActiveSupport::TimeWithZone) && RUBY_VERSION < '1.9' - time_zone_qualified_value = time_zone_qualified_value.to_time - end - time_zone_qualified_value.iso8601(3).to(22) - end + Type::DateTime.new.serialize(value) + elsif value.acts_like?(:date) + Type::Date.new.serialize(value) else - quoted_date(value) + value end end - - def quoted_full_iso8601(value) - if value.acts_like?(:time) - value.is_a?(Date) ? quoted_value_acts_like_time_filter(value).to_time.xmlschema.to(18) : quoted_value_acts_like_time_filter(value).iso8601(7).to(22) + + def quote(value) + case value + when Type::Binary::Data + "0x#{value.hex}" + when ActiveRecord::Type::SQLServer::Data + value.quoted + when String, ActiveSupport::Multibyte::Chars + "N#{super}" else - quoted_date(value) + super end end - def quoted_date(value) - if value.acts_like?(:time) && value.respond_to?(:usec) - "#{super}.#{sprintf("%03d",value.usec/1000)}" - elsif value.acts_like?(:date) - value.to_s(:_sqlserver_dateformat) + def type_cast(value) + case value + when ActiveRecord::Type::SQLServer::Data + value.to_s else super end end - - protected - - def quoted_value_acts_like_time_filter(value) - zone_conversion_method = ActiveRecord::Base.default_timezone == :utc ? :getutc : :getlocal - value.respond_to?(zone_conversion_method) ? 
value.send(zone_conversion_method) : value - end - end end end diff --git a/lib/active_record/connection_adapters/sqlserver/savepoints.rb b/lib/active_record/connection_adapters/sqlserver/savepoints.rb new file mode 100644 index 000000000..915cd07bb --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/savepoints.rb @@ -0,0 +1,26 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Savepoints + def current_savepoint_name + current_transaction.savepoint_name + end + + def create_savepoint(name = current_savepoint_name) + internal_execute("SAVE TRANSACTION #{name}", "TRANSACTION") + end + + def exec_rollback_to_savepoint(name = current_savepoint_name) + internal_execute("ROLLBACK TRANSACTION #{name}", "TRANSACTION") + end + + # SQL Server does require save-points to be explicitly released. + # See https://stackoverflow.com/questions/3101312/sql-server-2008-no-release-savepoint-for-current-transaction + def release_savepoint(_name) + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/schema_cache.rb b/lib/active_record/connection_adapters/sqlserver/schema_cache.rb deleted file mode 100644 index 280048e77..000000000 --- a/lib/active_record/connection_adapters/sqlserver/schema_cache.rb +++ /dev/null @@ -1,85 +0,0 @@ -module ActiveRecord - module ConnectionAdapters - module Sqlserver - class SchemaCache < ActiveRecord::ConnectionAdapters::SchemaCache - - attr_reader :view_information - - def initialize(conn) - super - @table_names = nil - @view_names = nil - @view_information = {} - @quoted_names = {} - end - - # Superclass Overrides - - def table_exists?(table_name) - return false if table_name.blank? - key = table_name_key(table_name) - return @tables[key] if @tables.key? key - @tables[key] = connection.table_exists?(table_name) - end - - def clear! - super - @table_names = nil - @view_names = nil - @view_information.clear - @quoted_names.clear - end - - def clear_table_cache!(table_name) - key = table_name_key(table_name) - super(key) - super(table_name) - # SQL Server Specific - if @table_names - @table_names.delete key - @table_names.delete table_name - end - if @view_names - @view_names.delete key - @view_names.delete table_name - end - @view_information.delete key - end - - # SQL Server Specific - - def table_names - @table_names ||= connection.tables - end - - def view_names - @view_names ||= connection.views - end - - def view_exists?(table_name) - table_exists?(table_name) - end - - def view_information(table_name) - key = table_name_key(table_name) - return @view_information[key] if @view_information.key? key - @view_information[key] = connection.send(:view_information, table_name) - end - - def quote_name(name) - return @quoted_names[name] if @quoted_names.key? name - @quoted_names[name] = name.to_s.split('.').map{ |n| n =~ /^\[.*\]$/ ? 
n : "[#{n.to_s.gsub(']', ']]')}]" }.join('.') - end - - - private - - def table_name_key(table_name) - Utils.unqualify_table_name(table_name) - end - - end - end - end -end - diff --git a/lib/active_record/connection_adapters/sqlserver/schema_creation.rb b/lib/active_record/connection_adapters/sqlserver/schema_creation.rb new file mode 100644 index 000000000..c7661a47d --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/schema_creation.rb @@ -0,0 +1,103 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + class SchemaCreation < SchemaCreation + private + + delegate :quoted_include_columns_for_index, to: :@conn + + def supports_index_using? + false + end + + def visit_ColumnDefinition(o) + column_sql = super + column_sql = column_sql.sub(" #{o.sql_type}", "") if o.options[:as].present? + column_sql + end + + def visit_TableDefinition(o) + if_not_exists = o.if_not_exists + + if o.as + table_name = quote_table_name(o.temporary ? "##{o.name}" : o.name) + query = o.as.respond_to?(:to_sql) ? o.as.to_sql : o.as + projections, source = query.match(%r{SELECT\s+(.*)?\s+FROM\s+(.*)?}).captures + sql = "SELECT #{projections} INTO #{table_name} FROM #{source}" + else + o.instance_variable_set :@as, nil + o.instance_variable_set :@if_not_exists, false + sql = super + end + + if if_not_exists + o.instance_variable_set :@if_not_exists, true + table_name = o.temporary ? "##{o.name}" : o.name + sql = "IF NOT EXISTS (SELECT * FROM sysobjects WHERE name='#{table_name}' and xtype='U') #{sql}" + end + + sql + end + + def visit_CreateIndexDefinition(o) + index = o.index + + sql = [] + sql << "IF NOT EXISTS (SELECT name FROM sysindexes WHERE name = '#{o.index.name}')" if o.if_not_exists + sql << "CREATE" + sql << "UNIQUE" if index.unique + sql << index.type.upcase if index.type + sql << "INDEX" + sql << "#{quote_column_name(index.name)} ON #{quote_table_name(index.table)}" + sql << "(#{quoted_columns(index)})" + sql << "INCLUDE (#{quoted_include_columns(index.include)})" if supports_index_include? && index.include + sql << "WHERE #{index.where}" if index.where + + sql.join(" ") + end + + def quoted_include_columns(o) + (String === o) ? o : quoted_include_columns_for_index(o) + end + + def add_column_options!(sql, options) + sql << " DEFAULT #{quote_default_expression_for_column_definition(options[:default], options[:column])}" if options_include_default?(options) + + sql << " COLLATE #{options[:collation]}" if options[:collation].present? + sql << " NOT NULL" if options[:null] == false + sql << " IDENTITY(1,1)" if options[:is_identity] == true + sql << " PRIMARY KEY" if options[:primary_key] == true + + if (as = options[:as]) + sql << " AS #{as}" + sql << " PERSISTED" if options[:stored] + end + + sql + end + + def action_sql(action, dependency) + case dependency + when :restrict + raise ArgumentError, <<~MSG.squish + '#{dependency}' is not supported for :on_update or :on_delete. + Supported values are: :nullify, :cascade + MSG + else + super + end + end + + def options_include_default?(options) + super || options_primary_key_with_nil_default?(options) + end + + def options_primary_key_with_nil_default?(options) + options[:primary_key] && options.include?(:default) && options[:default].nil? 
+ end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/schema_dumper.rb b/lib/active_record/connection_adapters/sqlserver/schema_dumper.rb new file mode 100644 index 000000000..775754b5b --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/schema_dumper.rb @@ -0,0 +1,65 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + class SchemaDumper < ConnectionAdapters::SchemaDumper + SQLSERVER_NO_LIMIT_TYPES = %w[text ntext varchar(max) nvarchar(max) varbinary(max)].freeze + + private + + def prepare_column_options(column) + spec = super + + if @connection.supports_virtual_columns? && column.virtual? + spec[:as] = extract_expression_for_virtual_column(column) + spec[:stored] = column.virtual_stored? + end + + spec + end + + def extract_expression_for_virtual_column(column) + column.default_function.inspect + end + + def explicit_primary_key_default?(column) + column.type == :integer && !column.is_identity? + end + + def schema_limit(column) + return if SQLSERVER_NO_LIMIT_TYPES.include?(column.sql_type) + + super + end + + def schema_collation(column) + return unless column.collation + + # use inspect to ensure collation is dumped as string. Without this it's dumped as + # a constant ('collation: SQL_Latin1_General_CP1_CI_AS') + collation = column.collation.inspect + # use inspect to ensure string comparison + default_collation = @connection.collation.inspect + + collation if collation != default_collation + end + + def default_primary_key?(column) + super && column.is_identity? + end + + def schemas(stream) + schema_names = @connection.schema_names + + if schema_names.any? + schema_names.sort.each do |name| + stream.puts " create_schema #{name.inspect}" + end + stream.puts + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/schema_statements.rb b/lib/active_record/connection_adapters/sqlserver/schema_statements.rb index df7e78235..d5a28f438 100644 --- a/lib/active_record/connection_adapters/sqlserver/schema_statements.rb +++ b/lib/active_record/connection_adapters/sqlserver/schema_statements.rb @@ -1,365 +1,788 @@ +# frozen_string_literal: true + module ActiveRecord module ConnectionAdapters - module Sqlserver + module SQLServer module SchemaStatements - - def native_database_types - @native_database_types ||= initialize_native_database_types.freeze + def create_table(table_name, **options) + res = super + clear_cache! + res end - def tables(table_type = 'BASE TABLE') - select_values "SELECT #{lowercase_schema_reflection_sql('TABLE_NAME')} FROM INFORMATION_SCHEMA.TABLES #{"WHERE TABLE_TYPE = '#{table_type}'" if table_type} ORDER BY TABLE_NAME", 'SCHEMA' + def drop_table(*table_names, **options) + table_names.each do |table_name| + # Mimic CASCADE option as best we can. 
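+          # Sketch of the behaviour (table names hypothetical): for
+          #   drop_table :authors, force: :cascade
+          # each sp_fkeys row describing a table such as [books] that references [authors]
+          # first has its foreign key removed and its dependent rows deleted, and only then
+          # is [authors] itself dropped.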
+ if options[:force] == :cascade + execute_procedure(:sp_fkeys, pktable_name: table_name).each do |fkdata| + fktable = fkdata["FKTABLE_NAME"] + fkcolmn = fkdata["FKCOLUMN_NAME"] + pktable = fkdata["PKTABLE_NAME"] + pkcolmn = fkdata["PKCOLUMN_NAME"] + remove_foreign_key fktable, name: fkdata["FK_NAME"] + execute "DELETE FROM #{quote_table_name(fktable)} WHERE #{quote_column_name(fkcolmn)} IN ( SELECT #{quote_column_name(pkcolmn)} FROM #{quote_table_name(pktable)} )" + end + end + if options[:if_exists] && version_year < 2016 + execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = #{quote(table_name)}) DROP TABLE #{quote_table_name(table_name)}", "SCHEMA" + else + super + end + end end - def table_exists?(table_name) - return false if table_name.blank? - unquoted_table_name = Utils.unqualify_table_name(table_name) - super || tables.include?(unquoted_table_name) || views.include?(unquoted_table_name) - end + def indexes(table_name) + data = begin + select("EXEC sp_helpindex #{quote(table_name)}", "SCHEMA") + rescue + [] + end - def indexes(table_name, name = nil) - data = select("EXEC sp_helpindex #{quote(table_name)}",name) rescue [] - data.inject([]) do |indexes,index| - index = index.with_indifferent_access - if index[:index_description] =~ /primary key/ + data.reduce([]) do |indexes, index| + if index["index_description"].match?(/primary key/) indexes else - name = index[:index_name] - unique = index[:index_description] =~ /unique/ - columns = index[:index_keys].split(',').map do |column| + name = index["index_name"] + unique = index["index_description"].match?(/unique/) + where = select_value("SELECT [filter_definition] FROM sys.indexes WHERE name = #{quote(name)}", "SCHEMA") + include_columns = index_include_columns(table_name, name) + orders = {} + columns = [] + + index["index_keys"].split(",").each do |column| column.strip! - column.gsub! '(-)', '' if column.ends_with?('(-)') - column + + if column.end_with?("(-)") + column.gsub! "(-)", "" + orders[column] = :desc + end + + columns << column end - indexes << IndexDefinition.new(table_name, name, unique, columns) + + indexes << IndexDefinition.new(table_name, name, unique, columns, where: where, orders: orders, include: include_columns.presence) end end end - def columns(table_name, name = nil) + def index_include_columns(table_name, index_name) + sql = <<~SQL + SELECT + ic.index_id, + c.name AS column_name + FROM + sys.indexes i + JOIN + sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id + JOIN + sys.columns c ON ic.object_id = c.object_id AND ic.column_id = c.column_id + WHERE + i.object_id = OBJECT_ID('#{table_name}') + AND i.name = '#{index_name}' + AND ic.is_included_column = 1; + SQL + + select_all(sql, "SCHEMA").map { |row| row["column_name"] } + end + + def columns(table_name) return [] if table_name.blank? 
- column_definitions(table_name).collect do |ci| - sqlserver_options = ci.except(:name,:default_value,:type,:null).merge(:database_year=>database_year) - SQLServerColumn.new ci[:name], ci[:default_value], ci[:type], ci[:null], sqlserver_options + + definitions = column_definitions(table_name) + definitions.map do |field| + new_column_from_field(table_name, field, definitions) end end - - def rename_table(table_name, new_name) - do_execute "EXEC sp_rename '#{table_name}', '#{new_name}'" + + def new_column_from_field(_table_name, field, _definitions) + sqlserver_options = field.slice(:ordinal_position, :is_primary, :is_identity, :table_name) + sql_type_metadata = fetch_type_metadata(field[:type], sqlserver_options) + generated_type = extract_generated_type(field) + + default_function = if generated_type.present? + field[:computed_formula] + else + field[:default_function] + end + + SQLServer::Column.new( + field[:name], + lookup_cast_type(field[:type]), + field[:default_value], + sql_type_metadata, + field[:null], + default_function, + collation: field[:collation], + comment: nil, + generated_type: generated_type, + **sqlserver_options + ) end - - def remove_column(table_name, *column_names) - raise ArgumentError.new("You must specify at least one column name. Example: remove_column(:people, :first_name)") if column_names.empty? - ActiveSupport::Deprecation.warn 'Passing array to remove_columns is deprecated, please use multiple arguments, like: `remove_columns(:posts, :foo, :bar)`', caller if column_names.flatten! - column_names.flatten.each do |column_name| - remove_check_constraints(table_name, column_name) - remove_default_constraint(table_name, column_name) - remove_indexes(table_name, column_name) - do_execute "ALTER TABLE #{quote_table_name(table_name)} DROP COLUMN #{quote_column_name(column_name)}" + + def extract_generated_type(field) + if field[:is_computed] + if field[:is_persisted] + :stored + else + :virtual + end end end + def primary_keys(table_name) + primaries = primary_keys_select(table_name) + primaries.present? ? primaries : identity_columns(table_name).map(&:name) + end + + def primary_keys_select(table_name) + identifier = database_prefix_identifier(table_name) + database = identifier.fully_qualified_database_quoted + sql = %( + SELECT #{lowercase_schema_reflection_sql("KCU.COLUMN_NAME")} AS [name] + FROM #{database}.INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS KCU + LEFT OUTER JOIN #{database}.INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC + ON KCU.CONSTRAINT_NAME = TC.CONSTRAINT_NAME + AND KCU.CONSTRAINT_NAME = TC.CONSTRAINT_NAME + AND KCU.CONSTRAINT_CATALOG = TC.CONSTRAINT_CATALOG + AND KCU.CONSTRAINT_SCHEMA = TC.CONSTRAINT_SCHEMA + AND TC.CONSTRAINT_TYPE = N'PRIMARY KEY' + WHERE KCU.TABLE_NAME = #{prepared_statements ? "@0" : quote(identifier.object)} + AND KCU.TABLE_SCHEMA = #{if identifier.schema.blank? + "schema_name()" + else + (prepared_statements ? "@1" : quote(identifier.schema)) + end} + AND TC.CONSTRAINT_TYPE = N'PRIMARY KEY' + ORDER BY KCU.ORDINAL_POSITION ASC + ).gsub(/[[:space:]]/, " ") + + binds = [] + nv128 = SQLServer::Type::UnicodeVarchar.new limit: 128 + binds << Relation::QueryAttribute.new("TABLE_NAME", identifier.object, nv128) + binds << Relation::QueryAttribute.new("TABLE_SCHEMA", identifier.schema, nv128) unless identifier.schema.blank? 
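+        # Rough usage sketch (names hypothetical): primary_keys("[blog].[dbo].[posts]")
+        # queries [blog].INFORMATION_SCHEMA.KEY_COLUMN_USAGE and typically returns ["id"];
+        # when the table has no PRIMARY KEY constraint, primary_keys falls back to the
+        # names of its identity columns instead.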
+ + internal_exec_query(sql, "SCHEMA", binds).map { |row| row["name"] } + end + + def rename_table(table_name, new_name, **options) + validate_table_length!(new_name) unless options[:_uses_legacy_table_name] + schema_cache.clear_data_source_cache!(table_name.to_s) + schema_cache.clear_data_source_cache!(new_name.to_s) + execute "EXEC sp_rename '#{table_name}', '#{new_name}'" + rename_table_indexes(table_name, new_name, **options) + end + + def remove_column(table_name, column_name, type = nil, **options) + raise ArgumentError.new("You must specify at least one column name. Example: remove_column(:people, :first_name)") if column_name.is_a? Array + return if options[:if_exists] == true && !column_exists?(table_name, column_name) + + remove_check_constraints(table_name, column_name) + remove_default_constraint(table_name, column_name) + remove_indexes(table_name, column_name) + execute "ALTER TABLE #{quote_table_name(table_name)} DROP COLUMN #{quote_column_name(column_name)}" + end + def change_column(table_name, column_name, type, options = {}) sql_commands = [] - column_object = schema_cache.columns[table_name].detect { |c| c.name.to_s == column_name.to_s } - change_column_sql = "ALTER TABLE #{quote_table_name(table_name)} ALTER COLUMN #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}" - change_column_sql << " NOT NULL" if options[:null] == false - sql_commands << change_column_sql - if options_include_default?(options) || (column_object && column_object.type != type.to_sym) - remove_default_constraint(table_name,column_name) + indexes = [] + + if type == :datetime + # If no precision then default it to 6. + options[:precision] = 6 unless options.key?(:precision) + + # If there is precision then column must be of type 'datetime2'. + type = :datetime2 unless options[:precision].nil? end - if options_include_default?(options) - sql_commands << "ALTER TABLE #{quote_table_name(table_name)} ADD CONSTRAINT #{default_constraint_name(table_name,column_name)} DEFAULT #{quote(options[:default])} FOR #{quote_column_name(column_name)}" + + column_object = schema_cache.columns(table_name).find { |c| c.name.to_s == column_name.to_s } + without_constraints = options.key?(:default) || options.key?(:limit) + default = if !options.key?(:default) && column_object + column_object.default + else + options[:default] + end + + if without_constraints || (column_object && column_object.type != type.to_sym) + remove_default_constraint(table_name, column_name) + indexes = indexes(table_name).select { |index| index.columns.include?(column_name.to_s) } + remove_indexes(table_name, column_name) end - sql_commands.each { |c| do_execute(c) } + + sql_commands << "UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote_default_expression(options[:default], column_object)} WHERE #{quote_column_name(column_name)} IS NULL" if !options[:null].nil? && options[:null] == false && !options[:default].nil? + alter_command = "ALTER TABLE #{quote_table_name(table_name)} ALTER COLUMN #{quote_column_name(column_name)} #{type_to_sql(type, limit: options[:limit], precision: options[:precision], scale: options[:scale])}" + alter_command += " COLLATE #{options[:collation]}" if options[:collation].present? + alter_command += " NOT NULL" if !options[:null].nil? 
&& options[:null] == false + sql_commands << alter_command + + if without_constraints + default = quote_default_expression(default, column_object || column_for(table_name, column_name)) + sql_commands << "ALTER TABLE #{quote_table_name(table_name)} ADD CONSTRAINT #{default_constraint_name(table_name, column_name)} DEFAULT #{default} FOR #{quote_column_name(column_name)}" + end + + sql_commands.each { |c| execute(c) } + + # Add any removed indexes back + indexes.each do |index| + create_index_def = CreateIndexDefinition.new(index) + execute schema_creation.accept(create_index_def) + end + + clear_cache! end - def change_column_default(table_name, column_name, default) + def change_column_default(table_name, column_name, default_or_changes) + clear_cache! + column = column_for(table_name, column_name) + return unless column + remove_default_constraint(table_name, column_name) - do_execute "ALTER TABLE #{quote_table_name(table_name)} ADD CONSTRAINT #{default_constraint_name(table_name, column_name)} DEFAULT #{quote(default)} FOR #{quote_column_name(column_name)}" + default = extract_new_default_value(default_or_changes) + execute "ALTER TABLE #{quote_table_name(table_name)} ADD CONSTRAINT #{default_constraint_name(table_name, column_name)} DEFAULT #{quote_default_expression(default, column)} FOR #{quote_column_name(column_name)}" + clear_cache! end def rename_column(table_name, column_name, new_column_name) - detect_column_for! table_name, column_name - do_execute "EXEC sp_rename '#{table_name}.#{column_name}', '#{new_column_name}', 'COLUMN'" + clear_cache! + identifier = SQLServer::Utils.extract_identifiers("#{table_name}.#{column_name}") + execute_procedure :sp_rename, identifier.quoted, new_column_name, "COLUMN" + rename_column_indexes(table_name, column_name, new_column_name) + clear_cache! + end + + def rename_index(table_name, old_name, new_name) + raise ArgumentError, "Index name '#{new_name}' on table '#{table_name}' is too long (#{new_name.length} characters); the limit is #{index_name_length} characters" if new_name.length > index_name_length + + identifier = SQLServer::Utils.extract_identifiers("#{table_name}.#{old_name}") + execute_procedure :sp_rename, identifier.quoted, new_name, "INDEX" end - + def remove_index!(table_name, index_name) - do_execute "DROP INDEX #{quote_column_name(index_name)} ON #{quote_table_name(table_name)}" + execute "DROP INDEX #{quote_column_name(index_name)} ON #{quote_table_name(table_name)}" + end + + def build_change_column_definition(table_name, column_name, type, **options) # :nodoc: + td = create_table_definition(table_name) + cd = td.new_column_definition(column_name, type, **options) + ChangeColumnDefinition.new(cd, column_name) + end + + def build_change_column_default_definition(table_name, column_name, default_or_changes) # :nodoc: + column = column_for(table_name, column_name) + return unless column + + default = extract_new_default_value(default_or_changes) + ChangeColumnDefaultDefinition.new(column, default) + end + + def foreign_keys(table_name) + identifier = SQLServer::Utils.extract_identifiers(table_name) + fk_info = execute_procedure :sp_fkeys, nil, identifier.schema, nil, identifier.object, identifier.schema + + grouped_fk = fk_info.group_by { |row| row["FK_NAME"] }.values.each { |group| group.sort_by! 
{ |row| row["KEY_SEQ"] } }.reverse + grouped_fk.map do |group| + row = group.first + options = { + name: row["FK_NAME"], + on_update: extract_foreign_key_action("update", row["FK_NAME"]), + on_delete: extract_foreign_key_action("delete", row["FK_NAME"]) + } + + if group.one? + options[:column] = row["FKCOLUMN_NAME"] + options[:primary_key] = row["PKCOLUMN_NAME"] + else + options[:column] = group.map { |row| row["FKCOLUMN_NAME"] } + options[:primary_key] = group.map { |row| row["PKCOLUMN_NAME"] } + end + + ForeignKeyDefinition.new(identifier.object, row["PKTABLE_NAME"], options) + end end - def type_to_sql(type, limit = nil, precision = nil, scale = nil) - type_limitable = ['string','integer','float','char','nchar','varchar','nvarchar'].include?(type.to_s) + def extract_foreign_key_action(action, fk_name) + case select_value("SELECT #{action}_referential_action_desc FROM sys.foreign_keys WHERE name = '#{fk_name}'") + when "CASCADE" then :cascade + when "SET_NULL" then :nullify + end + end + + def check_constraints(table_name) + sql = <<~SQL + select chk.name AS 'name', + chk.definition AS 'expression' + from sys.check_constraints chk + inner join sys.tables st on chk.parent_object_id = st.object_id + where + st.name = '#{table_name}' + SQL + + chk_info = internal_exec_query(sql, "SCHEMA") + + chk_info.map do |row| + options = { + name: row["name"] + } + expression = row["expression"] + expression = expression[1..-2] if expression.start_with?("(") && expression.end_with?(")") + + CheckConstraintDefinition.new(table_name, expression, options) + end + end + + def type_to_sql(type, limit: nil, precision: nil, scale: nil, **) + type_limitable = %w[string integer float char nchar varchar nvarchar binary_basic].include?(type.to_s) limit = nil unless type_limitable + case type.to_s - when 'integer' + when "integer" case limit - when 1..2 then 'smallint' - when 3..4, nil then 'integer' - when 5..8 then 'bigint' - else raise(ActiveRecordError, "No integer type has byte size #{limit}. Use a numeric with precision 0 instead.") + when 1 then "tinyint" + when 2 then "smallint" + when 3..4, nil then "integer" + when 5..8 then "bigint" + else raise(ActiveRecordError, "No integer type has byte size #{limit}. Use a numeric with precision 0 instead.") + end + when "time" # https://learn.microsoft.com/en-us/sql/t-sql/data-types/time-transact-sql + column_type_sql = type.to_s.dup + if precision + if (0..7) === precision + column_type_sql << "(#{precision})" + else + raise(ActiveRecordError, "The time type has precision of #{precision}. The allowed range of precision is from 0 to 7") + end end + column_type_sql + when "datetime2" + column_type_sql = super + if precision + if (0..7) === precision + column_type_sql << "(#{precision})" + else + raise(ActiveRecordError, "The datetime2 type has precision of #{precision}. The allowed range of precision is from 0 to 7") + end + end + column_type_sql + when "datetimeoffset" + column_type_sql = super + if precision + if (0..7) === precision + column_type_sql << "(#{precision})" + else + raise(ActiveRecordError, "The datetimeoffset type has precision of #{precision}. The allowed range of precision is from 0 to 7") + end + end + column_type_sql else super end end + # In SQL Server only the first column added should have the `ADD` keyword. 
+ def add_timestamps(table_name, **options) + fragments = add_timestamps_for_alter(table_name, **options) + fragments[1..].each { |fragment| fragment.sub!("ADD ", "") } + execute "ALTER TABLE #{quote_table_name(table_name)} #{fragments.join(", ")}" + end + + def columns_for_distinct(columns, orders) + order_columns = orders.reject(&:blank?).map { |s| + s = visitor.compile(s) unless s.is_a?(String) + s.gsub(/\s+(?:ASC|DESC)\b/i, "") + .gsub(/\s+NULLS\s+(?:FIRST|LAST)\b/i, "") + } + .reject(&:blank?) + .reject { |s| columns.include?(s) } + + order_columns_aliased = order_columns.map.with_index { |column, i| "#{column} AS alias_#{i}" } + + (order_columns_aliased << super).join(", ") + end + + def update_table_definition(table_name, base) + SQLServer::Table.new(table_name, base) + end + def change_column_null(table_name, column_name, null, default = nil) - column = detect_column_for! table_name, column_name - unless null || default.nil? - do_execute("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote(default)} WHERE #{quote_column_name(column_name)} IS NULL") + validate_change_column_null_argument!(null) + + table_id = SQLServer::Utils.extract_identifiers(table_name) + column_id = SQLServer::Utils.extract_identifiers(column_name) + column = column_for(table_name, column_name) + if !null.nil? && null == false && !default.nil? + execute("UPDATE #{table_id} SET #{column_id}=#{quote(default)} WHERE #{column_id} IS NULL") end - sql = "ALTER TABLE #{table_name} ALTER COLUMN #{quote_column_name(column_name)} #{type_to_sql column.type, column.limit, column.precision, column.scale}" - sql << " NOT NULL" unless null - do_execute sql + sql = "ALTER TABLE #{table_id} ALTER COLUMN #{column_id} #{type_to_sql column.type, limit: column.limit, precision: column.precision, scale: column.scale}" + sql += " NOT NULL" if !null.nil? && null == false + + execute sql end - - # === SQLServer Specific ======================================== # - - def views - tables('VIEW') - end - - - protected - - # === SQLServer Specific ======================================== # - - def initialize_native_database_types - { - :primary_key => "int NOT NULL IDENTITY(1,1) PRIMARY KEY", - :string => { :name => native_string_database_type, :limit => 255 }, - :text => { :name => native_text_database_type }, - :integer => { :name => "int", :limit => 4 }, - :float => { :name => "float", :limit => 8 }, - :decimal => { :name => "decimal" }, - :datetime => { :name => "datetime" }, - :timestamp => { :name => "datetime" }, - :time => { :name => native_time_database_type }, - :date => { :name => native_date_database_type }, - :binary => { :name => native_binary_database_type }, - :boolean => { :name => "bit"}, - # These are custom types that may move somewhere else for good schema_dumper.rb hacking to output them. 
- :char => { :name => 'char' }, - :varchar_max => { :name => 'varchar(max)' }, - :nchar => { :name => "nchar" }, - :nvarchar => { :name => "nvarchar", :limit => 255 }, - :nvarchar_max => { :name => "nvarchar(max)" }, - :ntext => { :name => "ntext" }, - :ss_timestamp => { :name => 'timestamp' } - } + + def create_schema_dumper(options) + SQLServer::SchemaDumper.create(self, options) end + def create_schema(schema_name, authorization = nil) + sql = "CREATE SCHEMA [#{schema_name}]" + sql += " AUTHORIZATION [#{authorization}]" if authorization + + execute sql + end + + def change_table_schema(schema_name, table_name) + execute "ALTER SCHEMA [#{schema_name}] TRANSFER [#{table_name}]" + end + + def drop_schema(schema_name) + execute "DROP SCHEMA [#{schema_name}]" + end + + # Returns an array of schema names. + def schema_names + sql = <<~SQL.squish + SELECT name + FROM sys.schemas + WHERE + name NOT LIKE 'db_%' AND + name NOT IN ('INFORMATION_SCHEMA', 'sys', 'guest') + SQL + + query_values(sql, "SCHEMA") + end + + def quoted_include_columns_for_index(column_names) # :nodoc: + return quote_column_name(column_names) if column_names.is_a?(Symbol) + + quoted_columns = column_names.each_with_object({}) do |name, result| + result[name.to_sym] = quote_column_name(name).dup + end + add_options_for_index_columns(quoted_columns).values.join(", ") + end + + private + + def data_source_sql(name = nil, type: nil) + scope = quoted_scope(name, type: type) + + table_schema = lowercase_schema_reflection_sql("TABLE_SCHEMA") + table_name = lowercase_schema_reflection_sql("TABLE_NAME") + database = scope[:database].present? ? "#{scope[:database]}." : "" + table_catalog = scope[:database].present? ? quote(scope[:database]) : "DB_NAME()" + + sql = "SELECT " + sql += " CASE" + sql += " WHEN #{table_schema} = 'dbo' THEN #{table_name}" + sql += " ELSE CONCAT(#{table_schema}, '.', #{table_name})" + sql += " END" + sql += " FROM #{database}INFORMATION_SCHEMA.TABLES WITH (NOLOCK)" + sql += " WHERE TABLE_CATALOG = #{table_catalog}" + sql += " AND TABLE_SCHEMA = #{quote(scope[:schema])}" if scope[:schema] + sql += " AND TABLE_NAME = #{quote(scope[:name])}" if scope[:name] + sql += " AND TABLE_TYPE = #{quote(scope[:type])}" if scope[:type] + sql += " ORDER BY #{table_name}" + sql + end + + def quoted_scope(name = nil, type: nil) + identifier = SQLServer::Utils.extract_identifiers(name) + + {}.tap do |scope| + scope[:database] = identifier.database if identifier.database + scope[:schema] = identifier.schema || "dbo" if name.present? + scope[:name] = identifier.object if identifier.object + scope[:type] = type if type + end + end + + # === SQLServer Specific ======================================== # + def column_definitions(table_name) - db_name = Utils.unqualify_db_name(table_name) - db_name_with_period = "#{db_name}." 
if db_name - table_schema = Utils.unqualify_table_schema(table_name) - table_name = Utils.unqualify_table_name(table_name) - sql = %{ - SELECT DISTINCT - #{lowercase_schema_reflection_sql('columns.TABLE_NAME')} AS table_name, - #{lowercase_schema_reflection_sql('columns.COLUMN_NAME')} AS name, - columns.DATA_TYPE AS type, - columns.COLUMN_DEFAULT AS default_value, - columns.NUMERIC_SCALE AS numeric_scale, - columns.NUMERIC_PRECISION AS numeric_precision, - columns.ordinal_position, - CASE - WHEN columns.DATA_TYPE IN ('nchar','nvarchar') THEN columns.CHARACTER_MAXIMUM_LENGTH - ELSE COL_LENGTH('#{db_name_with_period}'+columns.TABLE_SCHEMA+'.'+columns.TABLE_NAME, columns.COLUMN_NAME) - END AS [length], - CASE - WHEN columns.IS_NULLABLE = 'YES' THEN 1 - ELSE NULL - END AS [is_nullable], - CASE - WHEN KCU.COLUMN_NAME IS NOT NULL AND TC.CONSTRAINT_TYPE = N'PRIMARY KEY' THEN 1 - ELSE NULL - END AS [is_primary], - c.is_identity AS [is_identity] - FROM #{db_name_with_period}INFORMATION_SCHEMA.COLUMNS columns - LEFT OUTER JOIN #{db_name_with_period}INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC - ON TC.TABLE_NAME = columns.TABLE_NAME - AND TC.CONSTRAINT_TYPE = N'PRIMARY KEY' - LEFT OUTER JOIN #{db_name_with_period}INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS KCU - ON KCU.COLUMN_NAME = columns.COLUMN_NAME - AND KCU.CONSTRAINT_NAME = TC.CONSTRAINT_NAME - AND KCU.CONSTRAINT_CATALOG = TC.CONSTRAINT_CATALOG - AND KCU.CONSTRAINT_SCHEMA = TC.CONSTRAINT_SCHEMA - INNER JOIN #{db_name}.sys.schemas AS s - ON s.name = columns.TABLE_SCHEMA - AND s.schema_id = s.schema_id - INNER JOIN #{db_name}.sys.objects AS o - ON s.schema_id = o.schema_id - AND o.is_ms_shipped = 0 - AND o.type IN ('U', 'V') - AND o.name = columns.TABLE_NAME - INNER JOIN #{db_name}.sys.columns AS c - ON o.object_id = c.object_id - AND c.name = columns.COLUMN_NAME - WHERE columns.TABLE_NAME = @0 - AND columns.TABLE_SCHEMA = #{table_schema.blank? ? "schema_name()" : "@1"} - ORDER BY columns.ordinal_position - }.gsub(/[ \t\r\n]+/,' ') - binds = [['table_name', table_name]] - binds << ['table_schema',table_schema] unless table_schema.blank? - results = do_exec_query(sql, 'SCHEMA', binds) - results.collect do |ci| - ci = ci.symbolize_keys - ci[:type] = case ci[:type] - when /^bit|image|text|ntext|datetime$/ - ci[:type] - when /^numeric|decimal$/i - "#{ci[:type]}(#{ci[:numeric_precision]},#{ci[:numeric_scale]})" - when /^float|real$/i - "#{ci[:type]}(#{ci[:numeric_precision]})" - when /^char|nchar|varchar|nvarchar|varbinary|bigint|int|smallint$/ - ci[:length].to_i == -1 ? "#{ci[:type]}(max)" : "#{ci[:type]}(#{ci[:length]})" - else - ci[:type] - end - if ci[:default_value].nil? 
&& schema_cache.view_names.include?(table_name) - real_table_name = table_name_or_views_table_name(table_name) - real_column_name = views_real_column_name(table_name,ci[:name]) - col_default_sql = "SELECT c.COLUMN_DEFAULT FROM #{db_name_with_period}INFORMATION_SCHEMA.COLUMNS c WHERE c.TABLE_NAME = '#{real_table_name}' AND c.COLUMN_NAME = '#{real_column_name}'" - ci[:default_value] = select_value col_default_sql, 'SCHEMA' + identifier = database_prefix_identifier(table_name) + database = identifier.fully_qualified_database_quoted + view_exists = view_exists?(table_name) + + if view_exists + sql = <<~SQL + SELECT LOWER(c.COLUMN_NAME) AS [name], c.COLUMN_DEFAULT AS [default] + FROM #{database}.INFORMATION_SCHEMA.COLUMNS c + WHERE c.TABLE_NAME = #{quote(view_table_name(table_name))} + SQL + results = internal_exec_query(sql, "SCHEMA") + default_functions = results.each.with_object({}) { |row, out| out[row["name"]] = row["default"] }.compact + end + + sql = column_definitions_sql(database, identifier) + + binds = [] + nv128 = SQLServer::Type::UnicodeVarchar.new(limit: 128) + binds << Relation::QueryAttribute.new("TABLE_NAME", identifier.object, nv128) + binds << Relation::QueryAttribute.new("TABLE_SCHEMA", identifier.schema, nv128) unless identifier.schema.blank? + + results = internal_exec_query(sql, "SCHEMA", binds) + raise ActiveRecord::StatementInvalid, "Table '#{table_name}' doesn't exist" if results.empty? + + results.map do |ci| + col = ci.slice("name", "numeric_scale", "numeric_precision", "datetime_precision", "collation", "ordinal_position", "length", "is_computed", "is_persisted", "computed_formula").symbolize_keys + + col[:table_name] = view_exists ? view_table_name(table_name) : table_name + col[:type] = column_type(ci: ci) + col[:default_value], col[:default_function] = default_value_and_function(default: ci["default_value"], + name: ci["name"], + type: col[:type], + original_type: ci["type"], + view_exists: view_exists, + table_name: table_name, + default_functions: default_functions) + + col[:null] = ci["is_nullable"].to_i == 1 + col[:is_primary] = ci["is_primary"].to_i == 1 + + col[:is_identity] = if [true, false].include?(ci["is_identity"]) + ci["is_identity"] + else + ci["is_identity"].to_i == 1 + end + + col + end + end + + def default_value_and_function(default:, name:, type:, original_type:, view_exists:, table_name:, default_functions:) + if default.nil? && view_exists + view_column = views_real_column_name(table_name, name).downcase + default = default_functions[view_column] if view_column.present? + end + + case default + when nil + [nil, nil] + when /\A\((\w+\(\))\)\Z/ + default_function = Regexp.last_match[1] + [nil, default_function] + when /\A\(N'(.*)'\)\Z/m + string_literal = SQLServer::Utils.unquote_string(Regexp.last_match[1]) + [string_literal, nil] + when /CREATE DEFAULT/mi + [nil, nil] + else + type = case type + when /smallint|int|bigint/ then original_type + else type end - ci[:default_value] = case ci[:default_value] - when nil, '(null)', '(NULL)' - nil - when /\A\((\w+\(\))\)\Z/ - ci[:default_function] = $1 - nil - else - match_data = ci[:default_value].match(/\A\(+N?'?(.*?)'?\)+\Z/m) - match_data ? 
match_data[1] : nil - end - ci[:null] = ci[:is_nullable].to_i == 1 ; ci.delete(:is_nullable) - ci[:is_primary] = ci[:is_primary].to_i == 1 - ci[:is_identity] = ci[:is_identity].to_i == 1 unless [TrueClass, FalseClass].include?(ci[:is_identity].class) - ci + value = default.match(/\A\((.*)\)\Z/m)[1] + value = select_value("SELECT CAST(#{value} AS #{type}) AS value", "SCHEMA") + [value, nil] end end - + + def column_type(ci:) + case ci["type"] + when /^bit|image|text|ntext|datetime$/ + ci["type"] + when /^datetime2|datetimeoffset$/i + "#{ci["type"]}(#{ci["datetime_precision"]})" + when /^time$/i + "#{ci["type"]}(#{ci["datetime_precision"]})" + when /^numeric|decimal$/i + "#{ci["type"]}(#{ci["numeric_precision"]},#{ci["numeric_scale"]})" + when /^float|real$/i + ci["type"] + when /^char|nchar|varchar|nvarchar|binary|varbinary|bigint|int|smallint$/ + (ci["length"].to_i == -1) ? "#{ci["type"]}(max)" : "#{ci["type"]}(#{ci["length"]})" + else + ci["type"] + end + end + + def column_definitions_sql(database, identifier) + database = "TEMPDB" if identifier.temporary_table? + schema_name = "schema_name()" + + if prepared_statements + object_name = "@0" + schema_name = "@1" if identifier.schema.present? + else + object_name = quote(identifier.object) + schema_name = quote(identifier.schema) if identifier.schema.present? + end + + object_id_arg = identifier.schema.present? ? "CONCAT('.',#{schema_name},'.',#{object_name})" : "CONCAT('..',#{object_name})" + object_id_arg = "CONCAT('#{database}',#{object_id_arg})" + + %{ + SELECT + #{lowercase_schema_reflection_sql("o.name")} AS [table_name], + #{lowercase_schema_reflection_sql("c.name")} AS [name], + t.name AS [type], + d.definition AS [default_value], + CASE + WHEN t.name IN ('decimal', 'bigint', 'int', 'money', 'numeric', 'smallint', 'smallmoney', 'tinyint') + THEN c.scale + END AS [numeric_scale], + CASE + WHEN t.name IN ('decimal', 'bigint', 'int', 'money', 'numeric', 'smallint', 'smallmoney', 'tinyint', 'real', 'float') + THEN c.precision + END AS [numeric_precision], + CASE + WHEN t.name IN ('date', 'datetime', 'datetime2', 'datetimeoffset', 'smalldatetime', 'time') + THEN c.scale + END AS [datetime_precision], + c.collation_name AS [collation], + ROW_NUMBER() OVER (ORDER BY c.column_id) AS [ordinal_position], + CASE + WHEN t.name IN ('nchar', 'nvarchar') AND c.max_length > 0 + THEN c.max_length / 2 + ELSE c.max_length + END AS [length], + CASE c.is_nullable + WHEN 1 + THEN 1 + END AS [is_nullable], + CASE + WHEN ic.object_id IS NOT NULL + THEN 1 + END AS [is_primary], + c.is_identity AS [is_identity], + c.is_computed AS [is_computed], + cc.is_persisted AS [is_persisted], + cc.definition AS [computed_formula] + FROM #{database}.sys.columns c + INNER JOIN #{database}.sys.objects o + ON c.object_id = o.object_id + INNER JOIN #{database}.sys.schemas s + ON o.schema_id = s.schema_id + INNER JOIN #{database}.sys.types t + ON c.system_type_id = t.system_type_id + AND c.user_type_id = t.user_type_id + LEFT OUTER JOIN #{database}.sys.default_constraints d + ON c.object_id = d.parent_object_id + AND c.default_object_id = d.object_id + LEFT OUTER JOIN #{database}.sys.key_constraints k + ON c.object_id = k.parent_object_id + AND k.type = 'PK' + LEFT OUTER JOIN #{database}.sys.index_columns ic + ON k.parent_object_id = ic.object_id + AND k.unique_index_id = ic.index_id + AND c.column_id = ic.column_id + LEFT OUTER JOIN #{database}.sys.computed_columns cc + ON c.object_id = cc.object_id + AND c.column_id = cc.column_id + WHERE + o.Object_ID = 
Object_ID(#{object_id_arg}) + AND s.name = #{schema_name} + ORDER BY + c.column_id + }.gsub(/[ \t\r\n]+/, " ").strip + end + + def remove_columns_for_alter(table_name, *column_names, **options) + first, *rest = column_names + + # return an array like this [DROP COLUMN col_1, col_2, col_3]. Abstract adapter joins fragments with ", " + [remove_column_for_alter(table_name, first)] + rest.map { |column_name| quote_column_name(column_name) } + end + def remove_check_constraints(table_name, column_name) - constraints = select_values "SELECT CONSTRAINT_NAME FROM INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE where TABLE_NAME = '#{quote_string(table_name)}' and COLUMN_NAME = '#{quote_string(column_name)}'", 'SCHEMA' + constraints = select_values "SELECT CONSTRAINT_NAME FROM INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE where TABLE_NAME = '#{quote_string(table_name)}' and COLUMN_NAME = '#{quote_string(column_name)}'", "SCHEMA" constraints.each do |constraint| - do_execute "ALTER TABLE #{quote_table_name(table_name)} DROP CONSTRAINT #{quote_column_name(constraint)}" + execute "ALTER TABLE #{quote_table_name(table_name)} DROP CONSTRAINT #{quote_column_name(constraint)}" end end def remove_default_constraint(table_name, column_name) - # If their are foreign keys in this table, we could still get back a 2D array, so flatten just in case. - execute_procedure(:sp_helpconstraint, table_name, 'nomsg').flatten.select do |row| - row['constraint_type'] == "DEFAULT on column #{column_name}" + # If there are foreign keys in this table, we could still get back a 2D array, so flatten just in case. + execute_procedure(:sp_helpconstraint, table_name, "nomsg").flatten.select do |row| + row["constraint_type"] == "DEFAULT on column #{column_name}" end.each do |row| - do_execute "ALTER TABLE #{quote_table_name(table_name)} DROP CONSTRAINT #{row['constraint_name']}" + execute "ALTER TABLE #{quote_table_name(table_name)} DROP CONSTRAINT #{row["constraint_name"]}" end end def remove_indexes(table_name, column_name) - indexes(table_name).select{ |index| index.columns.include?(column_name.to_s) }.each do |index| - remove_index(table_name, {:name => index.name}) + indexes(table_name).select { |index| index.columns.include?(column_name.to_s) }.each do |index| + remove_index(table_name, name: index.name) end end - + # === SQLServer Specific (Misc Helpers) ========================= # - + + # Parses just the table name from the SQL. Table name does not include database/schema/etc. def get_table_name(sql) - if sql =~ /^\s*(INSERT|EXEC sp_executesql N'INSERT)\s+INTO\s+([^\(\s]+)\s*|^\s*update\s+([^\(\s]+)\s*/i - $2 || $3 - elsif sql =~ /FROM\s+([^\(\s]+)\s*/i - $1 + tn = get_raw_table_name(sql) + SQLServer::Utils.extract_identifiers(tn).object + end + + # Parses the raw table name that is used in the SQL. Table name could include database/schema/etc. + def get_raw_table_name(sql) + return if sql.blank? 
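
The extraction that `get_table_name`/`get_raw_table_name` perform boils down to stripping an optional `EXEC sp_executesql N'` prefix and then branching on the statement type (INSERT, UPDATE, MERGE, otherwise FROM). A simplified, standalone sketch of that idea follows; the helper name, the trimmed-down regexes, and the sample SQL strings are illustrative only, and the real method handles more cases (`OUTPUT INSERTED`, `MERGE INTO`, default VALUES clauses):

```ruby
# Illustrative sketch only: extract the raw (possibly bracket-quoted) table name
# from a SQL string by statement type, in the spirit of get_raw_table_name.
def raw_table_name(sql)
  sql = sql.sub(/\A\s*EXEC sp_executesql N'/i, "")

  sql[/\A\s*INSERT\s+INTO\s+([^(\s]+)/i, 1] ||   # INSERT INTO <name> ...
    sql[/\A\s*UPDATE\s+([^(\s]+)/i, 1] ||        # UPDATE <name> SET ...
    sql[/FROM[\s(]+((\[[^\]]+\])|[^(\s]+)/i, 1]  # ... FROM <name> ...
end

raw_table_name("EXEC sp_executesql N'INSERT INTO [dbo].[books] ([name]) VALUES (@0)") # => "[dbo].[books]"
raw_table_name("UPDATE people SET name = N'x' WHERE id = 1")                          # => "people"
raw_table_name("SELECT * FROM [with spaces] WHERE id = 1")                            # => "[with spaces]"
```
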
+ + s = sql.gsub(/^\s*EXEC sp_executesql N'/i, "") + + if s.match?(/^\s*INSERT INTO.*/i) + s.split(/INSERT INTO/i)[1] + .split(/OUTPUT INSERTED/i)[0] + .split(/(DEFAULT)?\s+VALUES/i)[0] + .split(/\bSELECT\b(?![^\[]*\])/i)[0] + .match(/\s*([^(]*)/i)[0] + elsif s.match?(/^\s*UPDATE\s+.*/i) + s.match(/UPDATE\s+([^(\s]+)\s*/i)[1] + elsif s.match?(/^\s*MERGE INTO.*/i) + s.match(/^\s*MERGE\s+INTO\s+(\[?[a-z0-9_ -]+\]?\.?\[?[a-z0-9_ -]+\]?)\s+(AS|WITH|USING)/i)[1] else - nil - end + s.match(/FROM[\s|(]+((\[[^(\]]+\])|[^(\s]+)\s*/i)[1] + end.strip end - + def default_constraint_name(table_name, column_name) "DF_#{table_name}_#{column_name}" end - - def detect_column_for!(table_name, column_name) - unless column = schema_cache.columns[table_name].detect { |c| c.name == column_name.to_s } - raise ActiveRecordError, "No such column: #{table_name}.#{column_name}" - end - column - end - + def lowercase_schema_reflection_sql(node) lowercase_schema_reflection ? "LOWER(#{node})" : node end - + # === SQLServer Specific (View Reflection) ====================== # - + def view_table_name(table_name) - view_info = schema_cache.view_information(table_name) - view_info ? get_table_name(view_info['VIEW_DEFINITION']) : table_name + view_info = view_information(table_name) + view_info.present? ? get_table_name(view_info["VIEW_DEFINITION"]) : table_name end - + def view_information(table_name) - table_name = Utils.unqualify_table_name(table_name) - view_info = select_one "SELECT * FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_NAME = '#{table_name}'", 'SCHEMA' - if view_info - view_info = view_info.with_indifferent_access - if view_info[:VIEW_DEFINITION].blank? || view_info[:VIEW_DEFINITION].length == 4000 - view_info[:VIEW_DEFINITION] = begin - select_values("EXEC sp_helptext #{quote_table_name(table_name)}", 'SCHEMA').join - rescue - warn "No view definition found, possible permissions problem.\nPlease run GRANT VIEW DEFINITION TO your_user;" - nil - end + @view_information ||= {} + + @view_information[table_name] ||= begin + identifier = SQLServer::Utils.extract_identifiers(table_name) + information_query_table = identifier.database.present? ? "[#{identifier.database}].[INFORMATION_SCHEMA].[VIEWS]" : "[INFORMATION_SCHEMA].[VIEWS]" + + view_info = select_one("SELECT * FROM #{information_query_table} WITH (NOLOCK) WHERE TABLE_NAME = #{quote(identifier.object)}", "SCHEMA").to_h + + if view_info.present? + if view_info["VIEW_DEFINITION"].blank? || view_info["VIEW_DEFINITION"].length == 4000 + view_info["VIEW_DEFINITION"] = begin + select_values("EXEC sp_helptext #{identifier.object_quoted}", "SCHEMA").join + rescue + warn "No view definition found, possible permissions problem.\nPlease run GRANT VIEW DEFINITION TO your_user;" + nil + end + end end - end - view_info - end - - def table_name_or_views_table_name(table_name) - unquoted_table_name = Utils.unqualify_table_name(table_name) - schema_cache.view_names.include?(unquoted_table_name) ? view_table_name(unquoted_table_name) : unquoted_table_name - end - - def views_real_column_name(table_name,column_name) - view_definition = schema_cache.view_information(table_name)[:VIEW_DEFINITION] - match_data = view_definition.match(/([\w-]*)\s+as\s+#{column_name}/im) - match_data ? 
match_data[1] : column_name - end - - # === SQLServer Specific (Identity Inserts) ===================== # - def query_requires_identity_insert?(sql) - if insert_sql?(sql) - table_name = get_table_name(sql) - id_column = identity_column(table_name) - id_column && sql =~ /^\s*(INSERT|EXEC sp_executesql N'INSERT)[^(]+\([^)]*\b(#{id_column.name})\b,?[^)]*\)/i ? quote_table_name(table_name) : false - else - false + view_info end end - - def insert_sql?(sql) - !(sql =~ /^\s*(INSERT|EXEC sp_executesql N'INSERT)/i).nil? - end - - def with_identity_insert_enabled(table_name) - table_name = quote_table_name(table_name_or_views_table_name(table_name)) - set_identity_insert(table_name, true) - yield - ensure - set_identity_insert(table_name, false) - end - def set_identity_insert(table_name, enable = true) - sql = "SET IDENTITY_INSERT #{table_name} #{enable ? 'ON' : 'OFF'}" - do_execute sql, 'SCHEMA' - rescue Exception => e - raise ActiveRecordError, "IDENTITY_INSERT could not be turned #{enable ? 'ON' : 'OFF'} for table #{table_name}" - end + def views_real_column_name(table_name, column_name) + view_definition = view_information(table_name)["VIEW_DEFINITION"] + return column_name if view_definition.blank? - def identity_column(table_name) - schema_cache.columns[table_name].detect(&:is_identity?) + # Remove "CREATE VIEW ... AS SELECT ..." and then match the column name. + match_data = view_definition.sub(/CREATE\s+VIEW.*AS\s+SELECT\s/, "").match(/([\w-]*)\s+AS\s+#{column_name}\W/im) + match_data ? match_data[1] : column_name end + def create_table_definition(*args, **options) + SQLServer::TableDefinition.new(self, *args, **options) + end end end end diff --git a/lib/active_record/connection_adapters/sqlserver/showplan.rb b/lib/active_record/connection_adapters/sqlserver/showplan.rb index adba2a0a0..30932ee72 100644 --- a/lib/active_record/connection_adapters/sqlserver/showplan.rb +++ b/lib/active_record/connection_adapters/sqlserver/showplan.rb @@ -1,66 +1,66 @@ -require 'active_record/connection_adapters/sqlserver/showplan/printer_table' -require 'active_record/connection_adapters/sqlserver/showplan/printer_xml' +# frozen_string_literal: true + +require "active_record/connection_adapters/sqlserver/showplan/printer_table" +require "active_record/connection_adapters/sqlserver/showplan/printer_xml" module ActiveRecord module ConnectionAdapters - module Sqlserver + module SQLServer module Showplan - - OPTION_ALL = 'SHOWPLAN_ALL' - OPTION_TEXT = 'SHOWPLAN_TEXT' - OPTION_XML = 'SHOWPLAN_XML' + OPTION_ALL = "SHOWPLAN_ALL" + OPTION_TEXT = "SHOWPLAN_TEXT" + OPTION_XML = "SHOWPLAN_XML" OPTIONS = [OPTION_ALL, OPTION_TEXT, OPTION_XML] - - def explain(arel, binds = []) + + def explain(arel, binds = [], options = []) sql = to_sql(arel) - result = with_showplan_on { do_exec_query(sql, 'EXPLAIN', binds) } + result = with_showplan_on { internal_exec_query(sql, "EXPLAIN", binds) } printer = showplan_printer.new(result) + printer.pp end - - + protected - + def with_showplan_on set_showplan_option(true) yield ensure set_showplan_option(false) end - + def set_showplan_option(enable = true) - sql = "SET #{option} #{enable ? 'ON' : 'OFF'}" - raw_connection_do(sql) - rescue Exception => e - raise ActiveRecordError, "#{option} could not be turned #{enable ? 'ON' : 'OFF'}, perhaps you do not have SHOWPLAN permissions?" + sql = "SET #{showplan_option} #{enable ? "ON" : "OFF"}" + raw_execute(sql, "SCHEMA") + rescue + raise ActiveRecordError, "#{showplan_option} could not be turned #{enable ? 
"ON" : "OFF"}, perhaps you do not have SHOWPLAN permissions?" end - - def option + + def showplan_option (SQLServerAdapter.showplan_option || OPTION_ALL).tap do |opt| raise(ArgumentError, "Unknown SHOWPLAN option #{opt.inspect} found.") if OPTIONS.exclude?(opt) end end - + def showplan_all? - option == OPTION_ALL + showplan_option == OPTION_ALL end - + def showplan_text? - option == OPTION_TEXT + showplan_option == OPTION_TEXT end - + def showplan_xml? - option == OPTION_XML + showplan_option == OPTION_XML end - + def showplan_printer - case option + case showplan_option when OPTION_XML then PrinterXml when OPTION_ALL, OPTION_TEXT then PrinterTable else PrinterTable end end - end end end diff --git a/lib/active_record/connection_adapters/sqlserver/showplan/printer_table.rb b/lib/active_record/connection_adapters/sqlserver/showplan/printer_table.rb index f6b28d4f8..0395ddb42 100644 --- a/lib/active_record/connection_adapters/sqlserver/showplan/printer_table.rb +++ b/lib/active_record/connection_adapters/sqlserver/showplan/printer_table.rb @@ -1,19 +1,20 @@ +# frozen_string_literal: true + module ActiveRecord module ConnectionAdapters - module Sqlserver + module SQLServer module Showplan class PrinterTable - cattr_accessor :max_column_width, :cell_padding self.max_column_width = 50 self.cell_padding = 1 - + attr_reader :result - + def initialize(result) @result = result end - + def pp @widths = compute_column_widths @separator = build_separator @@ -27,7 +28,7 @@ def pp pp << @separator pp.join("\n") + "\n" end - + private def compute_column_widths @@ -35,34 +36,32 @@ def compute_column_widths result.columns.each_with_index do |column, i| cells_in_column = [column] + result.rows.map { |r| cast_item(r[i]) } computed_width = cells_in_column.map(&:length).max - final_width = computed_width > max_column_width ? max_column_width : computed_width + final_width = (computed_width > max_column_width) ? 
max_column_width : computed_width computed_widths << final_width end end end - + def build_separator - '+' + @widths.map {|w| '-' * (w + (cell_padding*2))}.join('+') + '+' + "+" + @widths.map { |w| "-" * (w + (cell_padding * 2)) }.join("+") + "+" end - + def build_cells(items) cells = [] items.each_with_index do |item, i| cells << cast_item(item).ljust(@widths[i]) end - "| #{cells.join(' | ')} |" + "| #{cells.join(" | ")} |" end - + def cast_item(item) case item - when NilClass then 'NULL' + when NilClass then "NULL" when Float then item.to_s.to(9) else item.to_s.truncate(max_column_width) end end - end - end end end diff --git a/lib/active_record/connection_adapters/sqlserver/showplan/printer_xml.rb b/lib/active_record/connection_adapters/sqlserver/showplan/printer_xml.rb index e90f0ba73..717eaf859 100644 --- a/lib/active_record/connection_adapters/sqlserver/showplan/printer_xml.rb +++ b/lib/active_record/connection_adapters/sqlserver/showplan/printer_xml.rb @@ -1,24 +1,23 @@ +# frozen_string_literal: true + module ActiveRecord module ConnectionAdapters - module Sqlserver + module SQLServer module Showplan class PrinterXml - def initialize(result) @result = result end - + def pp xml = @result.rows.first.first if defined?(Nokogiri) - Nokogiri::XML(xml).to_xml :indent => 2, :encoding => 'UTF-8' + Nokogiri::XML(xml).to_xml indent: 2, encoding: "UTF-8" else xml end end - end - end end end diff --git a/lib/active_record/connection_adapters/sqlserver/sql_type_metadata.rb b/lib/active_record/connection_adapters/sqlserver/sql_type_metadata.rb new file mode 100644 index 000000000..b3f1222d2 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/sql_type_metadata.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + class TypeMetadata < DelegateClass(SqlTypeMetadata) + undef to_yaml if method_defined?(:to_yaml) + + include Deduplicable + + attr_reader :is_identity, :is_primary, :table_name, :ordinal_position + + def initialize(type_metadata, is_identity: nil, is_primary: nil, table_name: nil, ordinal_position: nil) + super(type_metadata) + @is_identity = is_identity + @is_primary = is_primary + @table_name = table_name + @ordinal_position = ordinal_position + end + + def ==(other) + other.is_a?(TypeMetadata) && + __getobj__ == other.__getobj__ && + is_identity == other.is_identity && + is_primary == other.is_primary && + table_name == other.table_name && + ordinal_position == other.ordinal_position + end + alias_method :eql?, :== + + def hash + [TypeMetadata, __getobj__, is_identity, is_primary, table_name, ordinal_position].hash + end + + private + + def deduplicated + __setobj__(__getobj__.deduplicate) + super + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/table_definition.rb b/lib/active_record/connection_adapters/sqlserver/table_definition.rb new file mode 100644 index 000000000..e68a17785 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/table_definition.rb @@ -0,0 +1,131 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module ColumnMethods + def primary_key(name, type = :primary_key, **options) + if [:integer, :bigint].include?(type) + options[:is_identity] = true unless options.key?(:default) + elsif type == :uuid + options[:default] = options.fetch(:default, "NEWID()") + end + super + end + + def primary_key_nonclustered(*names, **options) + names.each { |name| column(name, 
:primary_key_nonclustered, **options) } + end + + def real(*names, **options) + names.each { |name| column(name, :real, **options) } + end + + def money(*names, **options) + names.each { |name| column(name, :money, **options) } + end + + def smalldatetime(*names, **options) + names.each { |name| column(name, :smalldatetime, **options) } + end + + def datetime(*names, **options) + names.each do |name| + if options[:precision] + datetime2(name, **options) + else + column(name, :datetime, **options) + end + end + end + + def datetime2(*names, **options) + names.each { |name| column(name, :datetime2, **options) } + end + + def datetimeoffset(*names, **options) + names.each { |name| column(name, :datetimeoffset, **options) } + end + + def smallmoney(*names, **options) + names.each { |name| column(name, :smallmoney, **options) } + end + + def char(*names, **options) + names.each { |name| column(name, :char, **options) } + end + + def varchar(*names, **options) + names.each { |name| column(name, :varchar, **options) } + end + + def varchar_max(*names, **options) + names.each { |name| column(name, :varchar_max, **options) } + end + + def text_basic(*names, **options) + names.each { |name| column(name, :text_basic, **options) } + end + + def nchar(*names, **options) + names.each { |name| column(name, :nchar, **options) } + end + + def ntext(*names, **options) + names.each { |name| column(name, :ntext, **options) } + end + + def binary_basic(*names, **options) + names.each { |name| column(name, :binary_basic, **options) } + end + + def varbinary(*names, **options) + names.each { |name| column(name, :varbinary, **options) } + end + + def uuid(*names, **options) + names.each { |name| column(name, :uniqueidentifier, **options) } + end + + def ss_timestamp(*names, **options) + names.each { |name| column(name, :ss_timestamp, **options) } + end + + def json(*names, **options) + names.each { |name| column(name, :text, **options) } + end + end + + class TableDefinition < ActiveRecord::ConnectionAdapters::TableDefinition + include ColumnMethods + + def new_column_definition(name, type, **options) + case type + when :datetime, :timestamp + # If no precision then default it to 6. + options[:precision] = 6 unless options.key?(:precision) + + # If there is precision then column must be of type 'datetime2'. + type = :datetime2 unless options[:precision].nil? + when :primary_key + options[:is_identity] = true + when :virtual + type = options[:type] + end + + super + end + + private + + def valid_column_definition_options + super + [:is_identity, :as, :stored] + end + end + + class Table < ActiveRecord::ConnectionAdapters::Table + include ColumnMethods + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/transaction.rb b/lib/active_record/connection_adapters/sqlserver/transaction.rb new file mode 100644 index 000000000..b2677dc30 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/transaction.rb @@ -0,0 +1,59 @@ +# frozen_string_literal: true + +require "active_record/connection_adapters/abstract/transaction" + +module ActiveRecord + module ConnectionAdapters + module SQLServerTransaction + delegate :sqlserver?, to: :connection, prefix: true + + private + + def current_isolation_level + return unless connection_sqlserver? + + level = connection.user_options_isolation_level + # When READ_COMMITTED_SNAPSHOT is set to ON, + # user_options_isolation_level will be equal to 'read committed + # snapshot' which is not a valid isolation level + if level.blank? 
|| level == "read committed snapshot" + "READ COMMITTED" + else + level.upcase + end + end + end + + Transaction.send :prepend, SQLServerTransaction + + module SQLServerRealTransaction + attr_reader :starting_isolation_level + + def initialize(connection, isolation: nil, joinable: true, run_commit_callbacks: false) + @connection = connection + @starting_isolation_level = current_isolation_level if isolation + super + end + + def commit + super + reset_starting_isolation_level + end + + def rollback + super + reset_starting_isolation_level + end + + private + + def reset_starting_isolation_level + if connection_sqlserver? && starting_isolation_level + connection.set_transaction_isolation_level(starting_isolation_level) + end + end + end + + RealTransaction.send :prepend, SQLServerRealTransaction + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type.rb b/lib/active_record/connection_adapters/sqlserver/type.rb new file mode 100644 index 000000000..25be8287a --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +require "active_record/type" +# Behaviors +require "active_record/connection_adapters/sqlserver/type/data" +require "active_record/connection_adapters/sqlserver/type/time_value_fractional" +# Exact Numerics +require "active_record/connection_adapters/sqlserver/type/integer" +require "active_record/connection_adapters/sqlserver/type/big_integer" +require "active_record/connection_adapters/sqlserver/type/small_integer" +require "active_record/connection_adapters/sqlserver/type/tiny_integer" +require "active_record/connection_adapters/sqlserver/type/boolean" +require "active_record/connection_adapters/sqlserver/type/decimal" +require "active_record/connection_adapters/sqlserver/type/decimal_without_scale" +require "active_record/connection_adapters/sqlserver/type/money" +require "active_record/connection_adapters/sqlserver/type/small_money" +# Approximate Numerics +require "active_record/connection_adapters/sqlserver/type/float" +require "active_record/connection_adapters/sqlserver/type/real" +# Date and Time +require "active_record/connection_adapters/sqlserver/type/date" +require "active_record/connection_adapters/sqlserver/type/datetime" +require "active_record/connection_adapters/sqlserver/type/datetime2" +require "active_record/connection_adapters/sqlserver/type/datetimeoffset" +require "active_record/connection_adapters/sqlserver/type/smalldatetime" +require "active_record/connection_adapters/sqlserver/type/time" +# Character Strings +require "active_record/connection_adapters/sqlserver/type/string" +require "active_record/connection_adapters/sqlserver/type/char" +require "active_record/connection_adapters/sqlserver/type/varchar" +require "active_record/connection_adapters/sqlserver/type/varchar_max" +require "active_record/connection_adapters/sqlserver/type/text" +# Unicode Character Strings +require "active_record/connection_adapters/sqlserver/type/unicode_string" +require "active_record/connection_adapters/sqlserver/type/unicode_char" +require "active_record/connection_adapters/sqlserver/type/unicode_varchar" +require "active_record/connection_adapters/sqlserver/type/unicode_varchar_max" +require "active_record/connection_adapters/sqlserver/type/unicode_text" +# Binary Strings +require "active_record/connection_adapters/sqlserver/type/binary" +require "active_record/connection_adapters/sqlserver/type/varbinary" +require "active_record/connection_adapters/sqlserver/type/varbinary_max" +# 
Other Data Types +require "active_record/connection_adapters/sqlserver/type/uuid" +require "active_record/connection_adapters/sqlserver/type/timestamp" +require "active_record/connection_adapters/sqlserver/type/json" + +module ActiveRecord + module Type + SQLServer = ConnectionAdapters::SQLServer::Type + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/big_integer.rb b/lib/active_record/connection_adapters/sqlserver/type/big_integer.rb new file mode 100644 index 000000000..2cb2978ab --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/big_integer.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class BigInteger < Integer + def sqlserver_type + "bigint" + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/binary.rb b/lib/active_record/connection_adapters/sqlserver/type/binary.rb new file mode 100644 index 000000000..1f14a4078 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/binary.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class Binary < ActiveRecord::Type::Binary + def type + :binary_basic + end + + def sqlserver_type + "binary".yield_self do |type| + type += "(#{limit})" if limit + type + end + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/boolean.rb b/lib/active_record/connection_adapters/sqlserver/type/boolean.rb new file mode 100644 index 000000000..6c31f390d --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/boolean.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class Boolean < ActiveRecord::Type::Boolean + def sqlserver_type + "bit" + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/char.rb b/lib/active_record/connection_adapters/sqlserver/type/char.rb new file mode 100644 index 000000000..2084f5132 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/char.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class Char < String + def type + :char + end + + def serialize(value) + return if value.nil? 
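
The `serialize` methods on these types wrap the casted value in the `Data` value object defined above, so that quoting can later be delegated back to the type that produced it. A stripped-down, runnable sketch of that round trip (the `Sketch*` names are illustrative stand-ins, not part of the adapter; the quoting shown mirrors `Utils.quote_string_single`):

```ruby
# Minimal sketch of the Data-wrapper pattern: a serialized value remembers its type,
# and quoting is delegated back to that type.
SketchData = Struct.new(:value, :type) do
  def quoted
    type.quoted(value)
  end
end

class SketchChar
  def quoted(value)
    "'#{value.gsub("'", "''")}'"  # same single-quote doubling as quote_string_single
  end

  def serialize(value)
    return if value.nil?
    return value if value.is_a?(SketchData)

    SketchData.new(value, self)
  end
end

wrapped = SketchChar.new.serialize("O'Brien")
puts wrapped.quoted  # => 'O''Brien'
```
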
+ return value if value.is_a?(Data) + + Data.new super, self + end + + def sqlserver_type + "char".yield_self do |type| + type += "(#{limit})" if limit + type + end + end + + def quoted(value) + return value.quoted_id if value.respond_to?(:quoted_id) + + Utils.quote_string_single(value) + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/data.rb b/lib/active_record/connection_adapters/sqlserver/type/data.rb new file mode 100644 index 000000000..adddc5014 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/data.rb @@ -0,0 +1,53 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class Data + attr_reader :value, :type + + delegate :sub, to: :value + + def initialize(value, type) + @value, @type = value, type + end + + def quoted + type.quoted(@value) + end + + def to_s + @value + end + alias_method :to_str, :to_s + + def inspect + @value.inspect + end + + def eql?(other) + # Support comparing `Type::Char`, `Type::Varchar` and `VarcharMax` with strings. + # This happens when we use enum with string columns. + if other.is_a?(::String) + return type.is_a?(ActiveRecord::ConnectionAdapters::SQLServer::Type::String) && value == other + end + + self.class == other.class && value == other.value + end + alias_method :==, :eql? + + def self.from_msgpack_ext(string) + type, value = string.chomp!("msgpack_ext").split(",") + + Data.new(value, type.constantize) + end + + def to_msgpack_ext + [type.class.to_s, value].join(",") + "msgpack_ext" + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/date.rb b/lib/active_record/connection_adapters/sqlserver/type/date.rb new file mode 100644 index 000000000..56e652135 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/date.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class Date < ActiveRecord::Type::Date + def sqlserver_type + "date" + end + + def serialize(_value) + value = super + return value unless value.acts_like?(:date) + + date = super.to_formatted_s(:_sqlserver_dateformat) + Data.new(date, self) + end + + def deserialize(value) + value.is_a?(Data) ? super(value.value) : super + end + + def type_cast_for_schema(value) + serialize(value).quoted + end + + def quoted(value) + Utils.quote_string_single(value) + end + + private + + def fast_string_to_date(string) + ::Date.strptime(string, fast_string_to_date_format) + rescue ArgumentError + super + end + + def fast_string_to_date_format + ::Date::DATE_FORMATS[:_sqlserver_dateformat] + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/datetime.rb b/lib/active_record/connection_adapters/sqlserver/type/datetime.rb new file mode 100644 index 000000000..d1d8f739f --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/datetime.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class DateTime < ActiveRecord::Type::DateTime + include TimeValueFractional + + def sqlserver_type + "datetime" + end + + def serialize(_value) + value = super + return value unless value.acts_like?(:time) + + datetime = "#{value.to_formatted_s(:_sqlserver_datetime)}.#{quote_fractional(value)}" + Data.new(datetime, self) + end + + def deserialize(value) + value.is_a?(Data) ? 
super(value.value) : super + end + + def type_cast_for_schema(value) + serialize(value).quoted + end + + def quoted(value) + Utils.quote_string_single(value) + end + + private + + def fast_string_to_time(string) + time = ActiveSupport::TimeZone["UTC"].strptime(string, fast_string_to_time_format) + new_time(time.year, time.month, time.day, time.hour, + time.min, time.sec, Rational(time.nsec, 1_000)) + rescue ArgumentError + super + end + + def fast_string_to_time_format + "#{::Time::DATE_FORMATS[:_sqlserver_datetime]}.%N" + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/datetime2.rb b/lib/active_record/connection_adapters/sqlserver/type/datetime2.rb new file mode 100644 index 000000000..bb6867416 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/datetime2.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class DateTime2 < DateTime + include TimeValueFractional2 + + def sqlserver_type + "datetime2(#{precision.to_i})" + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/datetimeoffset.rb b/lib/active_record/connection_adapters/sqlserver/type/datetimeoffset.rb new file mode 100644 index 000000000..195e5c973 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/datetimeoffset.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class DateTimeOffset < DateTime2 + def type + :datetimeoffset + end + + def sqlserver_type + "datetimeoffset(#{precision.to_i})" + end + + def quoted(value) + Utils.quote_string_single(value) + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/decimal.rb b/lib/active_record/connection_adapters/sqlserver/type/decimal.rb new file mode 100644 index 000000000..e7b89f32d --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/decimal.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class Decimal < ActiveRecord::Type::Decimal + def sqlserver_type + "decimal".yield_self do |type| + type += "(#{precision.to_i},#{scale.to_i})" if precision || scale + type + end + end + + def type_cast_for_schema(value) + value.is_a?(BigDecimal) ? value.to_s : value.inspect + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/decimal_without_scale.rb b/lib/active_record/connection_adapters/sqlserver/type/decimal_without_scale.rb new file mode 100644 index 000000000..ddf8068c0 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/decimal_without_scale.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class DecimalWithoutScale < ActiveRecord::Type::DecimalWithoutScale + def sqlserver_type + "decimal".yield_self do |type| + type += "(#{precision.to_i},0)" if precision + type + end + end + + def type_cast_for_schema(value) + value.is_a?(BigDecimal) ? 
value.to_s : value.inspect + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/float.rb b/lib/active_record/connection_adapters/sqlserver/type/float.rb new file mode 100644 index 000000000..95589bd33 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/float.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class Float < ActiveRecord::Type::Float + def type + :float + end + + def sqlserver_type + "float" + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/integer.rb b/lib/active_record/connection_adapters/sqlserver/type/integer.rb new file mode 100644 index 000000000..c6cabcd8a --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/integer.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class Integer < ActiveRecord::Type::Integer + def sqlserver_type + "int" + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/json.rb b/lib/active_record/connection_adapters/sqlserver/type/json.rb new file mode 100644 index 000000000..5a90b3e38 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/json.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class Json < ActiveRecord::Type::Json + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/money.rb b/lib/active_record/connection_adapters/sqlserver/type/money.rb new file mode 100644 index 000000000..8997832dd --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/money.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class Money < Decimal + def initialize(**args) + super + @precision = 19 + @scale = 4 + end + + def type + :money + end + + def sqlserver_type + "money" + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/real.rb b/lib/active_record/connection_adapters/sqlserver/type/real.rb new file mode 100644 index 000000000..dbeef954c --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/real.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class Real < Float + def type + :real + end + + def sqlserver_type + "real" + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/small_integer.rb b/lib/active_record/connection_adapters/sqlserver/type/small_integer.rb new file mode 100644 index 000000000..1b408c101 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/small_integer.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class SmallInteger < Integer + def sqlserver_type + "smallint" + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/small_money.rb b/lib/active_record/connection_adapters/sqlserver/type/small_money.rb new file mode 100644 index 000000000..f69c5dc00 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/small_money.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +module 
ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class SmallMoney < Money + def initialize(**args) + super + @precision = 10 + @scale = 4 + end + + def type + :smallmoney + end + + def sqlserver_type + "smallmoney" + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/smalldatetime.rb b/lib/active_record/connection_adapters/sqlserver/type/smalldatetime.rb new file mode 100644 index 000000000..9ac526a41 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/smalldatetime.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class SmallDateTime < DateTime + def type + :smalldatetime + end + + def sqlserver_type + "smalldatetime" + end + + private + + def fast_string_to_time_format + ::Time::DATE_FORMATS[:_sqlserver_datetime] + end + + def apply_seconds_precision(value) + value&.change(usec: 0) + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/string.rb b/lib/active_record/connection_adapters/sqlserver/type/string.rb new file mode 100644 index 000000000..97f15bad0 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/string.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class String < ActiveRecord::Type::String + def changed_in_place?(raw_old_value, new_value) + if raw_old_value.is_a?(Data) + raw_old_value.value != new_value + else + super + end + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/text.rb b/lib/active_record/connection_adapters/sqlserver/type/text.rb new file mode 100644 index 000000000..248e27de6 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/text.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class Text < VarcharMax + def type + :text_basic + end + + def sqlserver_type + "text" + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/time.rb b/lib/active_record/connection_adapters/sqlserver/type/time.rb new file mode 100644 index 000000000..407a644d6 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/time.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class Time < ActiveRecord::Type::Time + include TimeValueFractional2 + + def serialize(_value) + value = super + return value unless value.acts_like?(:time) + + time = "#{value.to_formatted_s(:_sqlserver_time)}.#{quote_fractional(value)}" + Data.new(time, self) + end + + def deserialize(value) + value.is_a?(Data) ? 
super(value.value) : super + end + + def type_cast_for_schema(value) + serialize(value).quoted + end + + def sqlserver_type + "time(#{precision.to_i})" + end + + def quoted(value) + Utils.quote_string_single(value) + end + + private + + def cast_value(_value) + value = super + return value unless value.is_a?(::Time) + + value = value.change(year: 2000, month: 0o1, day: 0o1) + apply_seconds_precision(value) + end + + def fractional_scale + precision + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/time_value_fractional.rb b/lib/active_record/connection_adapters/sqlserver/type/time_value_fractional.rb new file mode 100644 index 000000000..7deff427d --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/time_value_fractional.rb @@ -0,0 +1,90 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + module TimeValueFractional + private + + def apply_seconds_precision(value) + return value if !value.respond_to?(fractional_property) || value.send(fractional_property).zero? + + value.change fractional_property => seconds_precision(value) + end + + def seconds_precision(value) + return 0 if fractional_scale == 0 + + seconds = value.send(fractional_property).to_f / fractional_operator.to_f + seconds = ((seconds * (1 / fractional_precision)).round / (1 / fractional_precision)).round(fractional_scale) + (seconds * fractional_operator).round(0).to_i + end + + def quote_fractional(value) + return 0 if fractional_scale == 0 + + frac_seconds = seconds_precision(value) + seconds = (frac_seconds.to_f / fractional_operator.to_f).round(fractional_scale) + seconds.to_d.to_s.split(".").last.to(fractional_scale - 1) + end + + def fractional_property + :usec + end + + def fractional_digits + 6 + end + + def fractional_operator + 10**fractional_digits + end + + def fractional_precision + 0.00333 + end + + def fractional_scale + 3 + end + end + + module TimeValueFractional2 + include TimeValueFractional + + private + + def seconds_precision(value) + seconds = super + (seconds > fractional_max) ? 
fractional_scale_max : seconds + end + + def fractional_property + :nsec + end + + def fractional_digits + 9 + end + + def fractional_precision + 0.0000001 + end + + def fractional_scale + precision + end + + def fractional_max + 999999999 + end + + def fractional_scale_max + ("9" * fractional_scale) + ("0" * (fractional_digits - fractional_scale)) + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/timestamp.rb b/lib/active_record/connection_adapters/sqlserver/type/timestamp.rb new file mode 100644 index 000000000..8eead81a3 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/timestamp.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class Timestamp < Binary + def type + :ss_timestamp + end + + def sqlserver_type + "timestamp" + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/tiny_integer.rb b/lib/active_record/connection_adapters/sqlserver/type/tiny_integer.rb new file mode 100644 index 000000000..b90d94ffb --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/tiny_integer.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class TinyInteger < Integer + def sqlserver_type + "tinyint" + end + + private + + def max_value + 256 + end + + def min_value + 0 + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/unicode_char.rb b/lib/active_record/connection_adapters/sqlserver/type/unicode_char.rb new file mode 100644 index 000000000..8e5ff566e --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/unicode_char.rb @@ -0,0 +1,22 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class UnicodeChar < UnicodeString + def type + :nchar + end + + def sqlserver_type + "nchar".yield_self do |type| + type += "(#{limit})" if limit + type + end + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/unicode_string.rb b/lib/active_record/connection_adapters/sqlserver/type/unicode_string.rb new file mode 100644 index 000000000..3a6ac257e --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/unicode_string.rb @@ -0,0 +1,12 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class UnicodeString < String + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/unicode_text.rb b/lib/active_record/connection_adapters/sqlserver/type/unicode_text.rb new file mode 100644 index 000000000..de0be9af4 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/unicode_text.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class UnicodeText < UnicodeVarcharMax + def type + :ntext + end + + def sqlserver_type + "ntext" + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/unicode_varchar.rb b/lib/active_record/connection_adapters/sqlserver/type/unicode_varchar.rb new file mode 100644 index 000000000..53551c442 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/unicode_varchar.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +module ActiveRecord + 
module ConnectionAdapters + module SQLServer + module Type + class UnicodeVarchar < UnicodeChar + def initialize(**args) + super + @limit = 4000 if @limit.to_i == 0 + end + + def type + :string + end + + def sqlserver_type + "nvarchar".yield_self do |type| + type += "(#{limit})" if limit + type + end + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/unicode_varchar_max.rb b/lib/active_record/connection_adapters/sqlserver/type/unicode_varchar_max.rb new file mode 100644 index 000000000..aee2ddc7b --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/unicode_varchar_max.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class UnicodeVarcharMax < UnicodeVarchar + def initialize(**args) + super + @limit = 2_147_483_647 + end + + def type + :text + end + + def sqlserver_type + "nvarchar(max)" + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/uuid.rb b/lib/active_record/connection_adapters/sqlserver/type/uuid.rb new file mode 100644 index 000000000..7bf2b25d1 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/uuid.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class Uuid < String + ACCEPTABLE_UUID = %r{\A\{?([a-fA-F0-9]{4}-?){8}\}?\z}x + + def type + :uuid + end + + def sqlserver_type + "uniqueidentifier" + end + + def serialize(value) + return unless value + + Data.new super, self + end + + def cast(value) + value.to_s[ACCEPTABLE_UUID, 0] + end + + def quoted(value) + Utils.quote_string_single(value) if value + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/varbinary.rb b/lib/active_record/connection_adapters/sqlserver/type/varbinary.rb new file mode 100644 index 000000000..2caa1228b --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/varbinary.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class Varbinary < Binary + def initialize(**args) + super + @limit = 8000 if @limit.to_i == 0 + end + + def type + :varbinary + end + + def sqlserver_type + "varbinary".yield_self do |type| + type += "(#{limit})" if limit + type + end + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/varbinary_max.rb b/lib/active_record/connection_adapters/sqlserver/type/varbinary_max.rb new file mode 100644 index 000000000..79291ba63 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/varbinary_max.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class VarbinaryMax < Varbinary + def initialize(**args) + super + @limit = 2_147_483_647 + end + + def type + :binary + end + + def sqlserver_type + "varbinary(max)" + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/varchar.rb b/lib/active_record/connection_adapters/sqlserver/type/varchar.rb new file mode 100644 index 000000000..466a3a43a --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/varchar.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class Varchar < Char + def initialize(**args) + 
super + @limit = 8000 if @limit.to_i == 0 + end + + def type + :varchar + end + + def sqlserver_type + "varchar".yield_self do |type| + type += "(#{limit})" if limit + type + end + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/type/varchar_max.rb b/lib/active_record/connection_adapters/sqlserver/type/varchar_max.rb new file mode 100644 index 000000000..75cb2260d --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/type/varchar_max.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Type + class VarcharMax < Varchar + def initialize(**args) + super + @limit = 2_147_483_647 + end + + def type + :varchar_max + end + + def sqlserver_type + "varchar(max)" + end + end + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver/utils.rb b/lib/active_record/connection_adapters/sqlserver/utils.rb index bab688063..77ec1d734 100644 --- a/lib/active_record/connection_adapters/sqlserver/utils.rb +++ b/lib/active_record/connection_adapters/sqlserver/utils.rb @@ -1,32 +1,158 @@ +# frozen_string_literal: true + +require "strscan" + module ActiveRecord module ConnectionAdapters - module Sqlserver - class Utils - - class << self - - def unquote_string(string) - string.to_s.gsub(/\'\'/, "'") + module SQLServer + module Utils + # Value object to return identifiers from SQL Server names http://bit.ly/1CZ3EiL + # Inspired from Rails PostgreSQL::Name adapter object in their own Utils. + class Name + UNQUOTED_SCANNER = /\]?\./ + QUOTED_SCANNER = /\A\[.*?\]\./ + QUOTED_CHECKER = /\A\[/ + + attr_reader :server, :database, :schema, :object + attr_reader :raw_name + + def initialize(name) + @raw_name = name.to_s + parse_raw_name end - - def unqualify_table_name(table_name) - table_name.to_s.split('.').last.tr('[]','') + + def object_quoted + quote object + end + + def schema_quoted + schema ? quote(schema) : schema + end + + def database_quoted + database ? quote(database) : database + end + + def server_quoted + server ? quote(server) : server + end + + def fully_qualified_database_quoted + [server_quoted, database_quoted].compact.join(".") + end + + def fully_qualified? + qualified_level == :fully + end + + def qualified_level + case parts.compact.size + when 4 + :fully + when 3 + :database + when 2 + :schema + when 1 + :table + else + :none + end + end + + def to_s + quoted + end + + def quoted + parts.map { |p| quote(p) if p }.join(".") + end + + def quoted_raw + quote @raw_name + end + + def ==(other) + other.class == self.class && other.parts == parts end + alias_method :eql?, :== - def unqualify_table_schema(table_name) - table_name.to_s.split('.')[-2].gsub(/[\[\]]/,'') rescue nil + def hash + parts.hash end - def unqualify_db_name(table_name) - table_names = table_name.to_s.split('.') - table_names.length == 3 ? table_names.first.tr('[]','') : nil + def temporary_table? + object.start_with?("#") end - + + protected + + def parse_raw_name + @parts = [] + return if raw_name.blank? + + scanner = StringScanner.new(raw_name) + matched = scanner.exist?(QUOTED_CHECKER) ? scanner.scan_until(QUOTED_SCANNER) : scanner.scan_until(UNQUOTED_SCANNER) + while matched + part = matched[0..-2] + @parts << (part.blank? ? nil : unquote(part)) + matched = scanner.exist?(QUOTED_CHECKER) ? 
scanner.scan_until(QUOTED_SCANNER) : scanner.scan_until(UNQUOTED_SCANNER) + end + case @parts.length + when 3 + @server, @database, @schema = @parts + when 2 + @database, @schema = @parts + when 1 + @schema = @parts.first + end + rest = scanner.rest + rest = rest.start_with?(".") ? rest[1..] : rest[0..] + @object = unquote(rest) + @parts << @object + end + + def quote(part) + /\A\[.*\]\z/.match?(part) ? part : "[#{part.to_s.gsub("]", "]]")}]" + end + + def unquote(part) + if part&.start_with?("[") + part[1..-2] + else + part + end + end + + attr_reader :parts + end + + extend self + + def quote_string(s) + s.to_s.gsub("'", "''") + end + + def quote_string_single(s) + "'#{quote_string(s)}'" + end + + def quote_string_single_national(s) + "N'#{quote_string(s)}'" + end + + def quoted_raw(name) + SQLServer::Utils::Name.new(name).quoted_raw + end + + def unquote_string(s) + s.to_s.gsub("''", "'") + end + + def extract_identifiers(name) + SQLServer::Utils::Name.new(name) end - end end end end - - diff --git a/lib/active_record/connection_adapters/sqlserver/version.rb b/lib/active_record/connection_adapters/sqlserver/version.rb new file mode 100644 index 000000000..ff7577895 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver/version.rb @@ -0,0 +1,11 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + module Version + VERSION = File.read(File.expand_path("../../../../../VERSION", __FILE__)).chomp + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver_adapter.rb b/lib/active_record/connection_adapters/sqlserver_adapter.rb index fe48a8c64..9f8810ed1 100644 --- a/lib/active_record/connection_adapters/sqlserver_adapter.rb +++ b/lib/active_record/connection_adapters/sqlserver_adapter.rb @@ -1,539 +1,577 @@ -require 'base64' -require 'arel/visitors/sqlserver' -require 'active_record' -require 'active_record/base' -require 'active_support/concern' -require 'active_support/core_ext/string' -require 'active_record/connection_adapters/abstract_adapter' -require 'active_record/connection_adapters/sqlserver/core_ext/active_record' -require 'active_record/connection_adapters/sqlserver/core_ext/database_statements' -require 'active_record/connection_adapters/sqlserver/core_ext/explain' -require 'active_record/connection_adapters/sqlserver/core_ext/relation' -require 'active_record/connection_adapters/sqlserver/database_limits' -require 'active_record/connection_adapters/sqlserver/database_statements' -require 'active_record/connection_adapters/sqlserver/errors' -require 'active_record/connection_adapters/sqlserver/schema_cache' -require 'active_record/connection_adapters/sqlserver/schema_statements' -require 'active_record/connection_adapters/sqlserver/showplan' -require 'active_record/connection_adapters/sqlserver/quoting' -require 'active_record/connection_adapters/sqlserver/utils' +# frozen_string_literal: true + +require "tiny_tds" +require "base64" +require "active_record" +require "active_record/connection_adapters/statement_pool" +require "arel_sqlserver" +require "active_record/connection_adapters/sqlserver/core_ext/active_record" +require "active_record/connection_adapters/sqlserver/core_ext/explain" +require "active_record/connection_adapters/sqlserver/core_ext/explain_subscriber" +require "active_record/connection_adapters/sqlserver/core_ext/attribute_methods" +require "active_record/connection_adapters/sqlserver/core_ext/finder_methods" +require 
"active_record/connection_adapters/sqlserver/core_ext/preloader" +require "active_record/connection_adapters/sqlserver/core_ext/abstract_adapter" +require "active_record/connection_adapters/sqlserver/version" +require "active_record/connection_adapters/sqlserver/type" +require "active_record/connection_adapters/sqlserver/database_limits" +require "active_record/connection_adapters/sqlserver/database_statements" +require "active_record/connection_adapters/sqlserver/database_tasks" +require "active_record/connection_adapters/sqlserver/savepoints" +require "active_record/connection_adapters/sqlserver/transaction" +require "active_record/connection_adapters/sqlserver/errors" +require "active_record/connection_adapters/sqlserver/schema_creation" +require "active_record/connection_adapters/sqlserver/schema_dumper" +require "active_record/connection_adapters/sqlserver/schema_statements" +require "active_record/connection_adapters/sqlserver/sql_type_metadata" +require "active_record/connection_adapters/sqlserver/showplan" +require "active_record/connection_adapters/sqlserver/table_definition" +require "active_record/connection_adapters/sqlserver/quoting" +require "active_record/connection_adapters/sqlserver/utils" +require "active_record/connection_adapters/sqlserver_column" +require "active_record/tasks/sqlserver_database_tasks" module ActiveRecord - - class Base - - def self.sqlserver_connection(config) #:nodoc: - config = config.symbolize_keys - config.reverse_merge! :mode => :dblib - mode = config[:mode].to_s.downcase.underscore.to_sym - case mode - when :dblib - require 'tiny_tds' - when :odbc - raise ArgumentError, 'Missing :dsn configuration.' unless config.has_key?(:dsn) - require 'odbc' - require 'active_record/connection_adapters/sqlserver/core_ext/odbc' - else - raise ArgumentError, "Unknown connection mode in #{config.inspect}." - end - ConnectionAdapters::SQLServerAdapter.new(nil, logger, nil, config.merge(:mode=>mode)) - end - - protected - - def self.did_retry_sqlserver_connection(connection,count) - logger.info "CONNECTION RETRY: #{connection.class.name} retry ##{count}." 
- end - - def self.did_lose_sqlserver_connection(connection) - logger.info "CONNECTION LOST: #{connection.class.name}" - end - - end - module ConnectionAdapters - - class SQLServerColumn < Column + register "sqlserver", "ActiveRecord::ConnectionAdapters::SQLServerAdapter", "active_record/connection_adapters/sqlserver_adapter" + + class SQLServerAdapter < AbstractAdapter + include SQLServer::Savepoints + include SQLServer::DatabaseTasks + include SQLServer::DatabaseLimits + include SQLServer::SchemaStatements + include SQLServer::Showplan + include SQLServer::DatabaseStatements + include SQLServer::Quoting + include SQLServer::Version + + ADAPTER_NAME = "SQLServer" + + # Default precision for 'time' (See https://docs.microsoft.com/en-us/sql/t-sql/data-types/time-transact-sql) + DEFAULT_TIME_PRECISION = 7 + + attr_reader :spid + + cattr_accessor :cs_equality_operator, instance_accessor: false + cattr_accessor :use_output_inserted, instance_accessor: false + cattr_accessor :exclude_output_inserted_table_names, instance_accessor: false + cattr_accessor :showplan_option, instance_accessor: false + cattr_accessor :lowercase_schema_reflection + + self.cs_equality_operator = "COLLATE Latin1_General_CS_AS_WS" + self.use_output_inserted = true + self.exclude_output_inserted_table_names = Concurrent::Map.new { false } + + NATIVE_DATABASE_TYPES = { + primary_key: "bigint NOT NULL IDENTITY(1,1) PRIMARY KEY", + primary_key_nonclustered: "bigint NOT NULL IDENTITY(1,1) PRIMARY KEY NONCLUSTERED", + integer: {name: "int", limit: 4}, + bigint: {name: "bigint"}, + boolean: {name: "bit"}, + decimal: {name: "decimal"}, + money: {name: "money"}, + smallmoney: {name: "smallmoney"}, + float: {name: "float"}, + real: {name: "real"}, + date: {name: "date"}, + datetime: {name: "datetime"}, + datetime2: {name: "datetime2"}, + datetimeoffset: {name: "datetimeoffset"}, + smalldatetime: {name: "smalldatetime"}, + timestamp: {name: "datetime2(6)"}, + time: {name: "time"}, + char: {name: "char"}, + varchar: {name: "varchar", limit: 8000}, + varchar_max: {name: "varchar(max)"}, + text_basic: {name: "text"}, + nchar: {name: "nchar"}, + string: {name: "nvarchar", limit: 4000}, + text: {name: "nvarchar(max)"}, + ntext: {name: "ntext"}, + binary_basic: {name: "binary"}, + varbinary: {name: "varbinary", limit: 8000}, + binary: {name: "varbinary(max)"}, + uuid: {name: "uniqueidentifier"}, + ss_timestamp: {name: "timestamp"}, + json: {name: "nvarchar(max)"} + } - def initialize(name, default, sql_type = nil, null = true, sqlserver_options = {}) - @sqlserver_options = sqlserver_options.symbolize_keys - super(name, default, sql_type, null) - @primary = @sqlserver_options[:is_identity] || @sqlserver_options[:is_primary] - end - class << self - - def string_to_binary(value) - "0x#{value.unpack("H*")[0]}" + def dbconsole(config, options = {}) + sqlserver_config = config.configuration_hash + args = [] + + args += ["-d", config.database.to_s] if config.database + args += ["-U", sqlserver_config[:username].to_s] if sqlserver_config[:username] + args += ["-P", sqlserver_config[:password].to_s] if sqlserver_config[:password] + + if sqlserver_config[:host] + host_arg = "tcp:#{sqlserver_config[:host]}" + host_arg += ",#{sqlserver_config[:port]}" if sqlserver_config[:port] + args += ["-S", host_arg] + end + + find_cmd_and_exec("sqlcmd", *args) end - - def binary_to_string(value) - value =~ /[^[:xdigit:]]/ ? 
value : [value].pack('H*') + + def new_client(config) + TinyTds::Client.new(config) + rescue TinyTds::Error => error + if /database .* does not exist/i.match?(error.message) + raise ActiveRecord::NoDatabaseError + else + raise + end + end + + def rails_application_name + Rails.application.class.name.split("::").first + rescue + nil # Might not be in a Rails context so we fallback to `nil`. + end + + def native_database_types # :nodoc: + NATIVE_DATABASE_TYPES end - - end - - def is_identity? - @sqlserver_options[:is_identity] end - - def is_primary? - @sqlserver_options[:is_primary] + + def initialize(...) + super + + @config[:tds_version] = "7.3" unless @config[:tds_version] + @config[:appname] = self.class.rails_application_name unless @config[:appname] + @config[:login_timeout] = @config[:login_timeout].present? ? @config[:login_timeout].to_i : nil + @config[:timeout] = @config[:timeout].present? ? @config[:timeout].to_i / 1000 : nil + @config[:encoding] = @config[:encoding].present? ? @config[:encoding] : nil + + @connection_parameters ||= @config end - - def is_utf8? - !!(@sql_type =~ /nvarchar|ntext|nchar/i) + + # === Abstract Adapter ========================================== # + + def arel_visitor + Arel::Visitors::SQLServer.new(self) end - - def is_integer? - !!(@sql_type =~ /int/i) + + def valid_type?(type) + !native_database_types[type].nil? end - - def is_real? - !!(@sql_type =~ /real/i) + + def schema_creation + SQLServer::SchemaCreation.new(self) end - - def sql_type_for_statement - if is_integer? || is_real? - sql_type.sub(/\((\d+)?\)/,'') - else - sql_type - end + + def supports_ddl_transactions? + true end - - def default_function - @sqlserver_options[:default_function] + + def supports_bulk_alter? + false end - - def table_name - @sqlserver_options[:table_name] + + def supports_advisory_locks? + false end - - def table_klass - @table_klass ||= begin - table_name.classify.constantize - rescue StandardError, NameError, LoadError - nil - end - (@table_klass && @table_klass < ActiveRecord::Base) ? @table_klass : nil + + def supports_index_sort_order? + true end - - def database_year - @sqlserver_options[:database_year] + + def supports_partial_index? + true end - - - private - - def extract_limit(sql_type) - case sql_type - when /^smallint/i - 2 - when /^int/i - 4 - when /^bigint/i - 8 - when /\(max\)/, /decimal/, /numeric/ - nil - else - super - end + + def supports_index_include? + true end - - def simplified_type(field_type) - case field_type - when /real/i then :float - when /money/i then :decimal - when /image/i then :binary - when /bit/i then :boolean - when /uniqueidentifier/i then :string - when /datetime/i then simplified_datetime - when /varchar\(max\)/ then :text - when /timestamp/ then :binary - else super - end + + def supports_expression_index? + false end - - def simplified_datetime - if database_year >= 2008 - :datetime - elsif table_klass && table_klass.coerced_sqlserver_date_columns.include?(name) - :date - elsif table_klass && table_klass.coerced_sqlserver_time_columns.include?(name) - :time - else - :datetime - end + + def supports_explain? 
+ true end - - end #class SQLServerColumn - - class SQLServerAdapter < AbstractAdapter - - include Sqlserver::Quoting - include Sqlserver::DatabaseStatements - include Sqlserver::Showplan - include Sqlserver::SchemaStatements - include Sqlserver::DatabaseLimits - include Sqlserver::Errors - - VERSION = File.read(File.expand_path("../../../../VERSION",__FILE__)).strip - ADAPTER_NAME = 'SQLServer'.freeze - DATABASE_VERSION_REGEXP = /Microsoft SQL Server\s+"?(\d{4}|\w+)"?/ - SUPPORTED_VERSIONS = [2005,2008,2010,2011,2012] - - attr_reader :database_version, :database_year, :spid, :product_level, :product_version, :edition - - cattr_accessor :native_text_database_type, :native_binary_database_type, :native_string_database_type, - :enable_default_unicode_types, :auto_connect, :retry_deadlock_victim, - :cs_equality_operator, :lowercase_schema_reflection, :auto_connect_duration, - :showplan_option - - self.enable_default_unicode_types = true - - - def initialize(connection, logger, pool, config) - super(connection, logger, pool) - # AbstractAdapter Responsibility - @schema_cache = Sqlserver::SchemaCache.new self - @visitor = Arel::Visitors::SQLServer.new self - # Our Responsibility - @config = config - @connection_options = config - connect - @database_version = select_value 'SELECT @@version', 'SCHEMA' - @database_year = begin - if @database_version =~ /Microsoft SQL Azure/i - @sqlserver_azure = true - @database_version.match(/\s(\d{4})\s/)[1].to_i - else - year = DATABASE_VERSION_REGEXP.match(@database_version)[1] - year == "Denali" ? 2011 : year.to_i - end - rescue - 0 - end - @product_level = select_value "SELECT CAST(SERVERPROPERTY('productlevel') AS VARCHAR(128))", 'SCHEMA' - @product_version = select_value "SELECT CAST(SERVERPROPERTY('productversion') AS VARCHAR(128))", 'SCHEMA' - @edition = select_value "SELECT CAST(SERVERPROPERTY('edition') AS VARCHAR(128))", 'SCHEMA' - initialize_dateformatter - use_database - unless SUPPORTED_VERSIONS.include?(@database_year) - raise NotImplementedError, "Currently, only #{SUPPORTED_VERSIONS.to_sentence} are supported. We got back #{@database_version}." - end + + def supports_transaction_isolation? + true end - - # === Abstract Adapter ========================================== # - - def adapter_name - ADAPTER_NAME + + def supports_indexes_in_create? + false end - - def supports_migrations? + + def supports_foreign_keys? true end - - def supports_primary_key? + + def supports_views? true end - - def supports_count_distinct? + + def supports_datetime_with_precision? true end - - def supports_ddl_transactions? + + def supports_check_constraints? true end - - def supports_bulk_alter? + + def supports_json? + version_year >= 2016 + end + + def supports_comments? false end - + + def supports_comments_in_create? + false + end + def supports_savepoints? true end - - def supports_index_sort_order? + + def supports_optimizer_hints? true end - - def supports_explain? + + def supports_common_table_expressions? + true + end + + def supports_lazy_transactions? true end - + + def supports_in_memory_oltp? + version_year >= 2014 + end + + def supports_insert_returning? + true + end + + def supports_insert_on_duplicate_skip? + true + end + + def supports_insert_on_duplicate_update? + true + end + + def supports_insert_conflict_target? + false + end + + def supports_virtual_columns? + true + end + + def return_value_after_insert?(column) # :nodoc: + column.is_primary? || column.is_identity? 
+ end + def disable_referential_integrity - do_execute "EXEC sp_MSforeachtable 'ALTER TABLE ? NOCHECK CONSTRAINT ALL'" + tables = tables_with_referential_integrity + tables.each { |t| execute "ALTER TABLE #{quote_table_name(t)} NOCHECK CONSTRAINT ALL" } yield ensure - do_execute "EXEC sp_MSforeachtable 'ALTER TABLE ? CHECK CONSTRAINT ALL'" + tables.each { |t| execute "ALTER TABLE #{quote_table_name(t)} CHECK CONSTRAINT ALL" } end - + # === Abstract Adapter (Connection Management) ================== # - + def active? - case @connection_options[:mode] - when :dblib - return @connection.active? + if @raw_connection&.active? + verified! + true end - raw_connection_do("SELECT 1") - true - rescue *lost_connection_exceptions + rescue *connection_errors false end - def reconnect! - disconnect! + def reconnect + begin + @raw_connection&.close + rescue + nil + end + @raw_connection = nil + @spid = nil + @collation = nil + connect - active? end def disconnect! - @spid = nil - case @connection_options[:mode] - when :dblib - @connection.close rescue nil - when :odbc - @connection.disconnect rescue nil + super + + begin + @raw_connection&.close + rescue + nil end + @raw_connection = nil + @spid = nil + @collation = nil end - + + def clear_cache!(...) + @view_information = nil + super + end + def reset! - remove_database_connections_and_rollback { } + reset_transaction + execute "IF @@TRANCOUNT > 0 ROLLBACK TRANSACTION" end - + # === Abstract Adapter (Misc Support) =========================== # - - def pk_and_sequence_for(table_name) - idcol = identity_column(table_name) - idcol ? [idcol.name,nil] : nil + + def tables_with_referential_integrity + schemas_and_tables = select_rows <<~SQL.squish + SELECT DISTINCT s.name, o.name + FROM sys.foreign_keys i + INNER JOIN sys.objects o ON i.parent_object_id = o.OBJECT_ID + INNER JOIN sys.schemas s ON o.schema_id = s.schema_id + SQL + schemas_and_tables.map do |schema_table| + schema, table = schema_table + "#{SQLServer::Utils.quoted_raw(schema)}.#{SQLServer::Utils.quoted_raw(table)}" + end end - def primary_key(table_name) - identity_column(table_name).try(:name) || schema_cache.columns[table_name].detect(&:is_primary?).try(:name) + def pk_and_sequence_for(table_name) + pk = primary_key(table_name) + pk ? [pk, nil] : nil end - + # === SQLServer Specific (DB Reflection) ======================== # - + def sqlserver? true end - - def sqlserver_2005? - @database_year == 2005 - end - - def sqlserver_2008? - @database_year == 2008 + + def sqlserver_azure? + !!(sqlserver_version =~ /Azure/i) end - - def sqlserver_2011? - @database_year == 2011 + + def database_prefix_remote_server? + return false if database_prefix.blank? + + name = SQLServer::Utils.extract_identifiers(database_prefix) + name.fully_qualified? && name.object.blank? end - - def sqlserver_2012? - @database_year == 2012 + + def database_prefix + @connection_parameters[:database_prefix] end - - def sqlserver_azure? - @sqlserver_azure + + def database_prefix_identifier(name) + if database_prefix_remote_server? + SQLServer::Utils.extract_identifiers("#{database_prefix}#{name}") + else + SQLServer::Utils.extract_identifiers(name) + end end - + def version self.class::VERSION end - - def inspect - "#<#{self.class} version: #{version}, year: #{@database_year}, product_level: #{@product_level.inspect}, product_version: #{@product_version.inspect}, edition: #{@edition.inspect}, connection_options: #{@connection_options.inspect}>" - end - - def auto_connect - @@auto_connect.is_a?(FalseClass) ? 
false : true - end - - def auto_connect_duration - @@auto_connect_duration ||= 10 - end - - def retry_deadlock_victim - @@retry_deadlock_victim.is_a?(FalseClass) ? false : true - end - alias :retry_deadlock_victim? :retry_deadlock_victim - - def native_string_database_type - @@native_string_database_type || (enable_default_unicode_types ? 'nvarchar' : 'varchar') - end - - def native_text_database_type - @@native_text_database_type || enable_default_unicode_types ? 'nvarchar(max)' : 'varchar(max)' - end - - def native_time_database_type - sqlserver_2005? ? 'datetime' : 'time' - end - - def native_date_database_type - sqlserver_2005? ? 'datetime' : 'date' + + def get_database_version # :nodoc: + version_year end - - def native_binary_database_type - @@native_binary_database_type || 'varbinary(max)' + + def check_version # :nodoc: + if schema_cache.database_version < 2012 + raise "Your version of SQL Server (#{database_version}) is too old. SQL Server Active Record supports 2012 or higher." + end end - - def cs_equality_operator - @@cs_equality_operator || 'COLLATE Latin1_General_CS_AS_WS' + + class << self + protected + + def initialize_type_map(m) + m.register_type %r{.*}, SQLServer::Type::UnicodeString.new + + # Exact Numerics + register_class_with_limit m, "bigint(8)", SQLServer::Type::BigInteger + m.alias_type "bigint", "bigint(8)" + register_class_with_limit m, "int(4)", SQLServer::Type::Integer + m.alias_type "integer", "int(4)" + m.alias_type "int", "int(4)" + register_class_with_limit m, "smallint(2)", SQLServer::Type::SmallInteger + m.alias_type "smallint", "smallint(2)" + register_class_with_limit m, "tinyint(1)", SQLServer::Type::TinyInteger + m.alias_type "tinyint", "tinyint(1)" + m.register_type "bit", SQLServer::Type::Boolean.new + m.register_type %r{\Adecimal}i do |sql_type| + scale = extract_scale(sql_type) + precision = extract_precision(sql_type) + if scale == 0 + SQLServer::Type::DecimalWithoutScale.new(precision: precision) + else + SQLServer::Type::Decimal.new(precision: precision, scale: scale) + end + end + m.alias_type %r{\Anumeric}i, "decimal" + m.register_type "money", SQLServer::Type::Money.new + m.register_type "smallmoney", SQLServer::Type::SmallMoney.new + + # Approximate Numerics + m.register_type "float", SQLServer::Type::Float.new + m.register_type "real", SQLServer::Type::Real.new + + # Date and Time + m.register_type "date", SQLServer::Type::Date.new + m.register_type %r{\Adatetime} do |sql_type| + precision = extract_precision(sql_type) + if precision + SQLServer::Type::DateTime2.new precision: precision + else + SQLServer::Type::DateTime.new + end + end + m.register_type %r{\Adatetimeoffset}i do |sql_type| + precision = extract_precision(sql_type) + SQLServer::Type::DateTimeOffset.new precision: precision + end + m.register_type "smalldatetime", SQLServer::Type::SmallDateTime.new + m.register_type %r{\Atime}i do |sql_type| + precision = extract_precision(sql_type) || DEFAULT_TIME_PRECISION + SQLServer::Type::Time.new precision: precision + end + + # Character Strings + register_class_with_limit m, %r{\Achar}i, SQLServer::Type::Char + register_class_with_limit m, %r{\Avarchar}i, SQLServer::Type::Varchar + m.register_type "varchar(max)", SQLServer::Type::VarcharMax.new + m.register_type "text", SQLServer::Type::Text.new + + # Unicode Character Strings + register_class_with_limit m, %r{\Anchar}i, SQLServer::Type::UnicodeChar + register_class_with_limit m, %r{\Anvarchar}i, SQLServer::Type::UnicodeVarchar + m.alias_type "string", "nvarchar(4000)" + 
m.register_type "nvarchar(max)", SQLServer::Type::UnicodeVarcharMax.new + m.register_type "ntext", SQLServer::Type::UnicodeText.new + + # Binary Strings + register_class_with_limit m, %r{\Abinary}i, SQLServer::Type::Binary + register_class_with_limit m, %r{\Avarbinary}i, SQLServer::Type::Varbinary + m.register_type "varbinary(max)", SQLServer::Type::VarbinaryMax.new + + # Other Data Types + m.register_type "uniqueidentifier", SQLServer::Type::Uuid.new + m.register_type "timestamp", SQLServer::Type::Timestamp.new + end end + + TYPE_MAP = Type::TypeMap.new.tap { |m| initialize_type_map(m) } + protected - + # === Abstract Adapter (Misc Support) =========================== # - - def translate_exception(e, message) + + def type_map + TYPE_MAP + end + + def translate_exception(exception, message:, sql:, binds:) case message - when /cannot insert duplicate key .* with unique index/i - RecordNotUnique.new(message,e) - when /conflicted with the foreign key constraint/i - InvalidForeignKey.new(message,e) + when /(SQL Server client is not connected)|(failed to execute statement)/i + ConnectionNotEstablished.new(message, connection_pool: @pool) + when /(cannot insert duplicate key .* with unique index) | (violation of (unique|primary) key constraint)/i + RecordNotUnique.new(message, sql: sql, binds: binds, connection_pool: @pool) + when /(conflicted with the foreign key constraint) | (The DELETE statement conflicted with the REFERENCE constraint)/i + InvalidForeignKey.new(message, sql: sql, binds: binds, connection_pool: @pool) when /has been chosen as the deadlock victim/i - DeadlockVictim.new(message,e) - when *lost_connection_messages - LostConnection.new(message,e) + DeadlockVictim.new(message, sql: sql, binds: binds, connection_pool: @pool) + when /database .* does not exist/i + NoDatabaseError.new(message, connection_pool: @pool) + when /data would be truncated/ + ValueTooLong.new(message, sql: sql, binds: binds, connection_pool: @pool) + when /connection timed out/ + StatementTimeout.new(message, sql: sql, binds: binds, connection_pool: @pool) + when /Column '(.*)' is not the same data type as referencing column '(.*)' in foreign key/ + MismatchedForeignKey.new(message: message, connection_pool: @pool) + when /Cannot insert the value NULL into column.*does not allow nulls/ + NotNullViolation.new(message, sql: sql, binds: binds, connection_pool: @pool) + when /Arithmetic overflow error/ + RangeError.new(message, sql: sql, binds: binds, connection_pool: @pool) + when /statement conflicted with the CHECK constraint/ + CheckViolation.new(message, sql: sql, binds: binds, connection_pool: @pool) else super end end - + # === SQLServer Specific (Connection Management) ================ # - - def connect - config = @connection_options - @connection = case config[:mode] - when :dblib - appname = config[:appname] || configure_application_name || Rails.application.class.name.split('::').first rescue nil - login_timeout = config[:login_timeout].present? ? config[:login_timeout].to_i : nil - timeout = config[:timeout].present? ? config[:timeout].to_i/1000 : nil - encoding = config[:encoding].present? ? 
config[:encoding] : nil - TinyTds::Client.new({ - :dataserver => config[:dataserver], - :host => config[:host], - :port => config[:port], - :username => config[:username], - :password => config[:password], - :database => config[:database], - :appname => appname, - :login_timeout => login_timeout, - :timeout => timeout, - :encoding => encoding, - :azure => config[:azure] - }).tap do |client| - if config[:azure] - client.execute("SET ANSI_NULLS ON").do - client.execute("SET CURSOR_CLOSE_ON_COMMIT OFF").do - client.execute("SET ANSI_NULL_DFLT_ON ON").do - client.execute("SET IMPLICIT_TRANSACTIONS OFF").do - client.execute("SET ANSI_PADDING ON").do - client.execute("SET QUOTED_IDENTIFIER ON") - client.execute("SET ANSI_WARNINGS ON").do - else - client.execute("SET ANSI_DEFAULTS ON").do - client.execute("SET CURSOR_CLOSE_ON_COMMIT OFF").do - client.execute("SET IMPLICIT_TRANSACTIONS OFF").do - end - client.execute("SET TEXTSIZE 2147483647").do - end - when :odbc - if config[:dsn].include?(';') - driver = ODBC::Driver.new.tap do |d| - d.name = config[:dsn_name] || 'Driver1' - d.attrs = config[:dsn].split(';').map{ |atr| atr.split('=') }.reject{ |kv| kv.size != 2 }.inject({}){ |h,kv| k,v = kv ; h[k] = v ; h } - end - ODBC::Database.new.drvconnect(driver) - else - ODBC.connect config[:dsn], config[:username], config[:password] - end.tap do |c| - begin - c.use_time = true - c.use_utc = ActiveRecord::Base.default_timezone == :utc - rescue Exception => e - warn "Ruby ODBC v0.99992 or higher is required." - end - end - end - @spid = _raw_select("SELECT @@SPID", :fetch => :rows).first.first - configure_connection - rescue - raise unless @auto_connecting - end - - # Override this method so every connection can be configured to your needs. - # For example: - # raw_connection_do "SET TEXTSIZE #{64.megabytes}" - # raw_connection_do "SET CONCAT_NULL_YIELDS_NULL ON" - def configure_connection - end - - # Override this method so every connection can have a unique name. Max 30 characters. Used by TinyTDS only. - # For example: - # "myapp_#{$$}_#{Thread.current.object_id}".to(29) - def configure_application_name + + def connection_errors + @raw_connection_errors ||= [].tap do |errors| + errors << TinyTds::Error if defined?(TinyTds::Error) + end end - + def initialize_dateformatter @database_dateformat = user_options_dateformat a, b, c = @database_dateformat.each_char.to_a - [a,b,c].each { |f| f.upcase! if f == 'y' } + + [a, b, c].each { |f| f.upcase! if f == "y" } dateformat = "%#{a}-%#{b}-%#{c}" ::Date::DATE_FORMATS[:_sqlserver_dateformat] = dateformat ::Time::DATE_FORMATS[:_sqlserver_dateformat] = dateformat + ::Time::DATE_FORMATS[:_sqlserver_time] = "%H:%M:%S" + ::Time::DATE_FORMATS[:_sqlserver_datetime] = "#{dateformat} %H:%M:%S" + ::Time::DATE_FORMATS[:_sqlserver_datetimeoffset] = lambda { |time| + time.strftime "#{dateformat} %H:%M:%S.%9N #{time.formatted_offset}" + } end - - def remove_database_connections_and_rollback(database=nil) - database ||= current_database - do_execute "ALTER DATABASE #{quote_table_name(database)} SET SINGLE_USER WITH ROLLBACK IMMEDIATE" - begin - yield - ensure - do_execute "ALTER DATABASE #{quote_table_name(database)} SET MULTI_USER" - end if block_given? - end - - def with_sqlserver_error_handling - begin - yield - rescue Exception => e - case translate_exception(e,e.message) - when LostConnection; retry if auto_reconnected? - when DeadlockVictim; retry if retry_deadlock_victim? 
&& open_transactions == 0 + + def version_year + @version_year ||= + if /vNext/.match?(sqlserver_version) + 2016 + else + /SQL Server (\d+)/.match(sqlserver_version).to_a.last.to_s.to_i end - raise - end end - - def disable_auto_reconnect - old_auto_connect, self.class.auto_connect = self.class.auto_connect, false - yield - ensure - self.class.auto_connect = old_auto_connect - end - - def auto_reconnected? - return false unless auto_connect - @auto_connecting = true - count = 0 - while count <= (auto_connect_duration / 2) - sleep 2** count - ActiveRecord::Base.did_retry_sqlserver_connection(self,count) - return true if reconnect! - count += 1 - end - ActiveRecord::Base.did_lose_sqlserver_connection(self) - false - ensure - @auto_connecting = false + + def sqlserver_version + @sqlserver_version ||= execute("SELECT @@version", "SCHEMA").rows.first.first.to_s + end + + private + + def connect + @raw_connection = self.class.new_client(@connection_parameters) end - - end #class SQLServerAdapter < AbstractAdapter - - end #module ConnectionAdapters - -end #module ActiveRecord + def configure_connection + if @config[:azure] + @raw_connection.execute("SET ANSI_NULLS ON").do + @raw_connection.execute("SET ANSI_NULL_DFLT_ON ON").do + @raw_connection.execute("SET ANSI_PADDING ON").do + @raw_connection.execute("SET ANSI_WARNINGS ON").do + else + @raw_connection.execute("SET ANSI_DEFAULTS ON").do + end + + @raw_connection.execute("SET QUOTED_IDENTIFIER ON").do + @raw_connection.execute("SET CURSOR_CLOSE_ON_COMMIT OFF").do + @raw_connection.execute("SET IMPLICIT_TRANSACTIONS OFF").do + @raw_connection.execute("SET TEXTSIZE 2147483647").do + @raw_connection.execute("SET CONCAT_NULL_YIELDS_NULL ON").do + + @spid = _raw_select("SELECT @@SPID", @raw_connection).first.first + + initialize_dateformatter + use_database + end + end + end +end diff --git a/lib/active_record/connection_adapters/sqlserver_column.rb b/lib/active_record/connection_adapters/sqlserver_column.rb new file mode 100644 index 000000000..d30ba7444 --- /dev/null +++ b/lib/active_record/connection_adapters/sqlserver_column.rb @@ -0,0 +1,97 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + class Column < ConnectionAdapters::Column + delegate :is_identity, :is_primary, :table_name, :ordinal_position, to: :sql_type_metadata + + def initialize(*, is_identity: nil, is_primary: nil, table_name: nil, ordinal_position: nil, generated_type: nil, **) + super + @is_identity = is_identity + @is_primary = is_primary + @table_name = table_name + @ordinal_position = ordinal_position + @generated_type = generated_type + end + + def is_identity? + is_identity + end + alias_method :auto_incremented_by_db?, :is_identity? + + def is_primary? + is_primary + end + + def is_utf8? + sql_type =~ /nvarchar|ntext|nchar/i + end + + def case_sensitive? + collation&.match(/_CS/) + end + + def virtual? + @generated_type.present? + end + + def virtual_stored? + @generated_type == :stored + end + + def has_default? + super && !virtual? + end + + def init_with(coder) + @is_identity = coder["is_identity"] + @is_primary = coder["is_primary"] + @table_name = coder["table_name"] + @ordinal_position = coder["ordinal_position"] + super + end + + def encode_with(coder) + coder["is_identity"] = @is_identity + coder["is_primary"] = @is_primary + coder["table_name"] = @table_name + coder["ordinal_position"] = @ordinal_position + super + end + + def ==(other) + other.is_a?(Column) && + super && + is_identity? == other.is_identity? 
&& + is_primary? == other.is_primary? && + table_name == other.table_name && + ordinal_position == other.ordinal_position + end + alias_method :eql?, :== + + def hash + [Column, super, is_identity?, is_primary?, table_name, ordinal_position].hash + end + + private + + # In the Rails version of this method there is an assumption that the `default` value will always be a + # `String` class, which must be true for the MySQL/PostgreSQL/SQLite adapters. However, in the SQL Server + # adapter the `default` value can also be Boolean/Date/Time/etc. Changed the implementation of this method + # to handle non-String `default` objects. + def deduplicated + @name = -name + @sql_type_metadata = sql_type_metadata.deduplicate if sql_type_metadata + @default = (default.is_a?(String) ? -default : default.dup.freeze) if default + @default_function = -default_function if default_function + @collation = -collation if collation + @comment = -comment if comment + freeze + end + end + + SQLServerColumn = SQLServer::Column + end + end +end diff --git a/lib/active_record/tasks/sqlserver_database_tasks.rb b/lib/active_record/tasks/sqlserver_database_tasks.rb new file mode 100644 index 000000000..62ca56733 --- /dev/null +++ b/lib/active_record/tasks/sqlserver_database_tasks.rb @@ -0,0 +1,141 @@ +# frozen_string_literal: true + +require "active_record/tasks/database_tasks" +require "shellwords" +require "ipaddr" +require "socket" + +module ActiveRecord + module Tasks + class SQLServerDatabaseTasks < AbstractTasks + DEFAULT_COLLATION = "SQL_Latin1_General_CP1_CI_AS" + + delegate :with_connection, :establish_connection, to: ActiveRecord::Base + + def self.using_database_configurations? + true + end + + def initialize(configuration) + @configuration = configuration + @configuration_hash = @configuration.configuration_hash + end + + def create(master_established = false) + establish_master_connection unless master_established + with_connection do |connection| + connection.create_database(configuration.database, configuration_hash.merge(collation: default_collation)) + end + establish_connection(configuration) + rescue ActiveRecord::StatementInvalid => e + if /database .* already exists/i.match?(e.message) + raise DatabaseAlreadyExists + else + raise + end + end + + def drop + establish_master_connection + with_connection { |connection| connection.drop_database(configuration.database) } + end + + def charset + with_connection { |connection| connection.charset } + end + + def collation + with_connection { |connection| connection.collation } + end + + def purge + clear_active_connections! + drop + create true + end + + def clear_active_connections! 
+ ActiveRecord::Base.connection_handler.clear_active_connections!(:all) + end + + def structure_dump(filename, _extra_flags) + with_connection do |connection| + server_arg = "-S #{Shellwords.escape(configuration_hash[:host])}" + server_arg += ":#{Shellwords.escape(configuration_hash[:port])}" if configuration_hash[:port] + command = [ + "defncopy-ttds", + server_arg, + "-D #{Shellwords.escape(configuration_hash[:database])}", + "-U #{Shellwords.escape(configuration_hash[:username])}", + "-P #{Shellwords.escape(configuration_hash[:password])}", + "-o #{Shellwords.escape(filename)}" + ] + table_args = connection.tables.map { |t| Shellwords.escape(t) } + command.concat(table_args) + view_args = connection.views.map { |v| Shellwords.escape(v) } + command.concat(view_args) + raise "Error dumping database" unless Kernel.system(command.join(" ")) + + dump = File.read(filename) + dump.gsub!(/^USE .*$\nGO\n/, "") # Strip db USE statements + dump.gsub!(/^GO\n/, "") # Strip db GO statements + dump.gsub!("nvarchar(8000)", "nvarchar(4000)") # Fix nvarchar(8000) column defs + dump.gsub!("nvarchar(-1)", "nvarchar(max)") # Fix nvarchar(-1) column defs + dump.gsub!(/text\(\d+\)/, "text") # Fix text(16) column defs + File.open(filename, "w") { |file| file.puts dump } + end + end + + def structure_load(filename, _extra_flags) + with_connection do |connection| + connection.execute File.read(filename) + end + end + + private + + attr_reader :configuration, :configuration_hash + + def default_collation + configuration_hash[:collation] || DEFAULT_COLLATION + end + + def establish_master_connection + establish_connection configuration_hash.merge(database: "master") + end + end + + module DatabaseTasksSQLServer + extend ActiveSupport::Concern + + module ClassMethods + LOCAL_IPADDR = [ + IPAddr.new("192.168.0.0/16"), + IPAddr.new("10.0.0.0/8"), + IPAddr.new("172.16.0.0/12") + ] + + private + + def local_database?(configuration) + super || local_ipaddr?(configuration_host_ip(configuration)) + end + + def configuration_host_ip(configuration) + return nil unless configuration.host + + Socket.getaddrinfo(configuration.host, "echo", Socket::AF_INET)[0][3] + end + + def local_ipaddr?(host_ip) + return false unless host_ip + + LOCAL_IPADDR.any? { |ip| ip.include?(host_ip) } + end + end + end + + DatabaseTasks.register_task %r{sqlserver}, SQLServerDatabaseTasks + DatabaseTasks.send :include, DatabaseTasksSQLServer + end +end diff --git a/lib/activerecord-sqlserver-adapter.rb b/lib/activerecord-sqlserver-adapter.rb index b420ca178..ab2363641 100644 --- a/lib/activerecord-sqlserver-adapter.rb +++ b/lib/activerecord-sqlserver-adapter.rb @@ -1 +1,3 @@ -require 'active_record/connection_adapters/sqlserver_adapter' +# frozen_string_literal: true + +require "active_record/connection_adapters/sqlserver_adapter" diff --git a/lib/arel/visitors/sqlserver.rb b/lib/arel/visitors/sqlserver.rb index f7a1951b6..fcf275d86 100644 --- a/lib/arel/visitors/sqlserver.rb +++ b/lib/arel/visitors/sqlserver.rb @@ -1,389 +1,409 @@ -require 'arel' +# frozen_string_literal: true module Arel + module Visitors + class SQLServer < Arel::Visitors::ToSql + OFFSET = " OFFSET " + ROWS = " ROWS" + FETCH = " FETCH NEXT " + FETCH0 = " FETCH FIRST (SELECT 0) " + ROWS_ONLY = " ROWS ONLY" + + private - module Nodes + # SQLServer ToSql/Visitor (Overrides) - # Extending the Ordering class to be comparrison friendly which allows us to call #uniq on a - # collection of them. See SelectManager#order for more details. 
- class Ordering < Arel::Nodes::Unary - def hash - expr.hash + BIND_BLOCK = proc { |i| "@#{i - 1}" } + private_constant :BIND_BLOCK + + def bind_block + BIND_BLOCK end - def ==(other) - other.is_a?(Arel::Nodes::Ordering) && self.expr == other.expr + + def visit_Arel_Nodes_Bin(o, collector) + visit o.expr, collector + collector << " #{ActiveRecord::ConnectionAdapters::SQLServerAdapter.cs_equality_operator} " end - def eql?(other) - self == other + + def visit_Arel_Nodes_Concat(o, collector) + visit o.left, collector + collector << " + " + visit o.right, collector end - end - end + # Same as SQLite and PostgreSQL. + def visit_Arel_Nodes_UpdateStatement(o, collector) + collector.retryable = false + o = prepare_update_statement(o) + + collector << "UPDATE " + + # UPDATE with JOIN is in the form of: + # + # UPDATE t1 + # SET .. + # FROM t1 JOIN t2 ON t2.join_id = t1.join_id .. + # WHERE .. + if has_join_sources?(o) + collector = visit o.relation.left, collector + collect_nodes_for o.values, collector, " SET " + collector << " FROM " + collector = inject_join o.relation.right, collector, " " + else + collector = visit o.relation, collector + collect_nodes_for o.values, collector, " SET " + end - class SelectManager < Arel::TreeManager - - AR_CA_SQLSA_NAME = 'ActiveRecord::ConnectionAdapters::SQLServerAdapter'.freeze - - # Getting real Ordering objects is very important for us. We need to be able to call #uniq on - # a colleciton of them reliably as well as using their true object attributes to mutate them - # to grouping objects for the inner sql during a select statment with an offset/rownumber. So this - # is here till ActiveRecord & ARel does this for us instead of using SqlLiteral objects. - alias :order_without_sqlserver :order - def order(*expr) - return order_without_sqlserver(*expr) unless engine_activerecord_sqlserver_adapter? - @ast.orders.concat(expr.map{ |x| - case x - when Arel::Attributes::Attribute - table = Arel::Table.new(x.relation.table_alias || x.relation.name) - e = table[x.name] - Arel::Nodes::Ascending.new e - when Arel::Nodes::Ordering - x - when String - x.split(',').map do |s| - s = x if x.strip =~ /\A\b\w+\b\(.*,.*\)(\s+(ASC|DESC))?\Z/i # Allow functions with comma(s) to pass thru. - s.strip! - d = s =~ /(ASC|DESC)\Z/i ? $1.upcase : nil - e = d.nil? ? s : s.mb_chars[0...-d.length].strip - e = Arel.sql(e) - d && d == "DESC" ? Arel::Nodes::Descending.new(e) : Arel::Nodes::Ascending.new(e) - end + collect_nodes_for o.wheres, collector, " WHERE ", " AND " + collect_nodes_for o.orders, collector, " ORDER BY " + maybe_visit o.limit, collector + maybe_visit o.comment, collector + end + + # Similar to PostgreSQL and SQLite. + def prepare_update_statement(o) + if o.key && has_join_sources?(o) && !has_group_by_and_having?(o) && !has_limit_or_offset_or_orders?(o) + # Join clauses cannot reference the target table, so alias the + # updated table, place the entire relation in the FROM clause, and + # add a self-join (which requires the primary key) + stmt = o.clone + + stmt.relation, stmt.wheres = o.relation.clone, o.wheres.clone + stmt.relation.right = [stmt.relation.left, *stmt.relation.right] + # Don't need to use alias + stmt else - e = Arel.sql(x.to_s) - Arel::Nodes::Ascending.new e + # If using subquery, we need to add limit + o.limit = Nodes::Limit.new(9_223_372_036_854_775_807) if o.orders.any? && o.limit.nil? 
+ + super end - }.flatten) - self - end + end - # A friendly over ride that allows us to put a special lock object that can have a default or pass - # custom string hints down. See the visit_Arel_Nodes_LockWithSQLServer delegation method. - alias :lock_without_sqlserver :lock - def lock(locking=true) - if engine_activerecord_sqlserver_adapter? - case locking - when true - locking = Arel.sql('WITH(HOLDLOCK, ROWLOCK)') - when Arel::Nodes::SqlLiteral - when String - locking = Arel.sql locking + def visit_Arel_Nodes_DeleteStatement(o, collector) + if has_join_and_composite_primary_key?(o) + delete_statement_using_join(o, collector) + else + super end - @ast.lock = Arel::Nodes::Lock.new(locking) - self - else - lock_without_sqlserver(locking) end - end - - private - - def engine_activerecord_sqlserver_adapter? - @engine.connection && @engine.connection.class.name == AR_CA_SQLSA_NAME - end - - end - module Visitors - class SQLServer < Arel::Visitors::ToSql + def has_join_and_composite_primary_key?(o) + has_join_sources?(o) && o.relation.left.instance_variable_get(:@klass).composite_primary_key? + end - private + def delete_statement_using_join(o, collector) + collector.retryable = false - # SQLServer ToSql/Visitor (Overides) + collector << "DELETE " + visit o.relation.left, collector + collector << " FROM " + visit o.relation, collector + collect_nodes_for o.wheres, collector, " WHERE ", " AND " + end + + def visit_Arel_Nodes_Lock(o, collector) + o.expr = Arel.sql("WITH(UPDLOCK)") if /FOR UPDATE/.match?(o.expr.to_s) + collector << " " + visit o.expr, collector + end - def visit_Arel_Nodes_SelectStatement(o) - if complex_count_sql?(o) - visit_Arel_Nodes_SelectStatementForComplexCount(o) - elsif o.offset - visit_Arel_Nodes_SelectStatementWithOffset(o) + def visit_Arel_Nodes_Offset(o, collector) + collector << OFFSET + visit o.expr, collector + collector << ROWS + end + + def visit_Arel_Nodes_Limit(o, collector) + if node_value(o) == 0 + collector << FETCH0 else - visit_Arel_Nodes_SelectStatementWithOutOffset(o) + collector << FETCH + visit o.expr, collector end + collector << ROWS_ONLY end - - def visit_Arel_Nodes_UpdateStatement(o) - if o.orders.any? && o.limit.nil? - o.limit = Nodes::Limit.new(9223372036854775807) - end + + def visit_Arel_Nodes_Grouping(o, collector) + remove_invalid_ordering_from_select_statement(o.expr) super end - def visit_Arel_Nodes_Offset(o) - "WHERE [__rnt].[__rn] > (#{visit o.expr})" + def visit_Arel_Nodes_HomogeneousIn(o, collector) + collector.preparable = false + + visit o.left, collector + + collector << if o.type == :in + " IN (" + else + " NOT IN (" + end + + values = o.casted_values + + # Monkey-patch start. + column_name = o.attribute.name + column_type = o.attribute.relation.type_for_attribute(column_name) + column_type = column_type.cast_type if column_type.is_a?(ActiveRecord::Encryption::EncryptedAttributeType) # Use cast_type on encrypted attributes. Don't encrypt them again + + if values.empty? + collector << @connection.quote(nil) + elsif @connection.prepared_statements && !column_type.serialized? + # Add query attribute bindings rather than just values. + attrs = values.map { |value| ActiveRecord::Relation::QueryAttribute.new(column_name, value, column_type) } + collector.add_binds(attrs, &bind_block) + else + collector.add_binds(values, o.proc_for_binds, &bind_block) + end + # Monkey-patch end. 
+ + collector << ")" end - def visit_Arel_Nodes_Limit(o) - "TOP (#{visit o.expr})" + def visit_Arel_Nodes_SelectStatement(o, collector) + @select_statement = o + optimizer_hints = nil + distinct_One_As_One_Is_So_Not_Fetch o + if o.with + collector = visit o.with, collector + collector << " " + end + collector = o.cores.inject(collector) do |collect, core| + optimizer_hints = core.optimizer_hints if core.optimizer_hints + visit_Arel_Nodes_SelectCore(core, collect) + end + collector = visit_Orders_And_Let_Fetch_Happen o, collector + collector = visit_Make_Fetch_Happen o, collector + collector = maybe_visit optimizer_hints, collector + collector + ensure + @select_statement = nil end - def visit_Arel_Nodes_Lock(o) - visit o.expr + def visit_Arel_Nodes_OptimizerHints(o, collector) + hints = o.expr.map { |v| sanitize_as_option_clause(v) }.join(", ") + collector << "OPTION (#{hints})" end - - def visit_Arel_Nodes_Ordering(o) - if o.respond_to?(:direction) - "#{visit o.expr} #{o.ascending? ? 'ASC' : 'DESC'}" + + def visit_Arel_Table(o, collector) + # Apparently, o.engine.connection can actually be a different adapter + # than sqlserver. Can be removed if fixed in ActiveRecord. See: + # github.com/rails-sqlserver/activerecord-sqlserver-adapter/issues/450 + table_name = + begin + o.class.engine.with_connection do |connection| + if connection.respond_to?(:sqlserver?) && connection.database_prefix_remote_server? + remote_server_table_name(o) + else + quote_table_name(o.name) + end + end + rescue + quote_table_name(o.name) + end + + collector << if o.table_alias + "#{table_name} #{quote_table_name o.table_alias}" else - visit o.expr + table_name end end - - def visit_Arel_Nodes_Bin(o) - "#{visit o.expr} #{@connection.cs_equality_operator}" - end - # SQLServer ToSql/Visitor (Additions) + def visit_Arel_Nodes_JoinSource(o, collector) + if o.left + collector = visit o.left, collector + collector = visit_Arel_Nodes_SelectStatement_SQLServer_Lock collector + end + if o.right.any? + collector << " " if o.left + collector = inject_join o.right, collector, " " + end + collector + end - def visit_Arel_Nodes_SelectStatementWithOutOffset(o, windowed=false) - find_and_fix_uncorrelated_joins_in_select_statement(o) - core = o.cores.first - projections = core.projections - groups = core.groups - orders = o.orders.uniq - if windowed - projections = function_select_statement?(o) ? projections : projections.map { |x| projection_without_expression(x) } - groups = projections.map { |x| projection_without_expression(x) } if windowed_single_distinct_select_statement?(o) && groups.empty? - groups += orders.map { |x| Arel.sql(x.expr) } if windowed_single_distinct_select_statement?(o) - elsif eager_limiting_select_statement?(o) - projections = projections.map { |x| projection_without_expression(x) } - groups = projections.map { |x| projection_without_expression(x) } - orders = orders.map do |x| - expr = Arel.sql projection_without_expression(x.expr) - x.descending? ? 
Arel::Nodes::Max.new([expr]) : Arel::Nodes::Min.new([expr]) + def visit_Arel_Nodes_InnerJoin(o, collector) + if o.left.is_a?(Arel::Nodes::As) && o.left.left.is_a?(Arel::Nodes::Lateral) + collector << "CROSS " + visit o.left, collector + else + collector << "INNER JOIN " + collector = visit o.left, collector + collector = visit_Arel_Nodes_SelectStatement_SQLServer_Lock collector, space: true + if o.right + collector << " " + visit(o.right, collector) + else + collector end - elsif top_one_everything_for_through_join?(o) - projections = projections.map { |x| projection_without_expression(x) } end - [ ("SELECT" if !windowed), - (visit(core.set_quantifier) if core.set_quantifier && !windowed), - (visit(o.limit) if o.limit && !windowed), - (projections.map{ |x| v = visit(x); v == "1" ? "1 AS [__wrp]" : v }.join(', ')), - (source_with_lock_for_select_statement(o)), - ("WHERE #{core.wheres.map{ |x| visit(x) }.join ' AND ' }" unless core.wheres.empty?), - ("GROUP BY #{groups.map { |x| visit x }.join ', ' }" unless groups.empty?), - (visit(core.having) if core.having), - ("ORDER BY #{orders.map{ |x| visit(x) }.join(', ')}" if !orders.empty? && !windowed) - ].compact.join ' ' - end - - def visit_Arel_Nodes_SelectStatementWithOffset(o) - core = o.cores.first - o.limit ||= Arel::Nodes::Limit.new(9223372036854775807) - orders = rowtable_orders(o) - [ "SELECT", - (visit(o.limit) if o.limit && !windowed_single_distinct_select_statement?(o)), - (rowtable_projections(o).map{ |x| visit(x) }.join(', ')), - "FROM (", - "SELECT #{core.set_quantifier ? 'DISTINCT DENSE_RANK()' : 'ROW_NUMBER()'} OVER (ORDER BY #{orders.map{ |x| visit(x) }.join(', ')}) AS [__rn],", - visit_Arel_Nodes_SelectStatementWithOutOffset(o,true), - ") AS [__rnt]", - (visit(o.offset) if o.offset), - "ORDER BY [__rnt].[__rn] ASC" - ].compact.join ' ' - end - - def visit_Arel_Nodes_SelectStatementForComplexCount(o) - core = o.cores.first - o.limit.expr = Arel.sql("#{o.limit.expr} + #{o.offset ? 
o.offset.expr : 0}") if o.limit - orders = rowtable_orders(o) - [ "SELECT COUNT([count]) AS [count_id]", - "FROM (", - "SELECT", - (visit(o.limit) if o.limit), - "ROW_NUMBER() OVER (ORDER BY #{orders.map{ |x| visit(x) }.join(', ')}) AS [__rn],", - "1 AS [count]", - (source_with_lock_for_select_statement(o)), - ("WHERE #{core.wheres.map{ |x| visit(x) }.join ' AND ' }" unless core.wheres.empty?), - ("GROUP BY #{core.groups.map { |x| visit x }.join ', ' }" unless core.groups.empty?), - (visit(core.having) if core.having), - ("ORDER BY #{o.orders.map{ |x| visit(x) }.join(', ')}" if !o.orders.empty?), - ") AS [__rnt]", - (visit(o.offset) if o.offset) - ].compact.join ' ' end + def visit_Arel_Nodes_OuterJoin(o, collector) + if o.left.is_a?(Arel::Nodes::As) && o.left.left.is_a?(Arel::Nodes::Lateral) + collector << "OUTER " + visit o.left, collector + else + collector << "LEFT OUTER JOIN " + collector = visit o.left, collector + collector = visit_Arel_Nodes_SelectStatement_SQLServer_Lock collector, space: true + collector << " " + visit o.right, collector + end + end - # SQLServer Helpers - - def source_with_lock_for_select_statement(o) - core = o.cores.first - source = "FROM #{visit(core.source).strip}" if core.source - if source && o.lock - lock = visit o.lock - index = source.match(/FROM [\w\[\]\.]+/)[0].mb_chars.length - source.insert index, " #{lock}" + def visit_Arel_Nodes_In(o, collector) + if Array === o.right + o.right.each { |node| remove_invalid_ordering_from_select_statement(node) } else - source + remove_invalid_ordering_from_select_statement(o.right) end + + super end - def table_from_select_statement(o) - core = o.cores.first - # TODO: [ARel 2.2] Use #from/#source vs. #froms - # if Arel::Table === core.from - # core.from - # elsif Arel::Nodes::SqlLiteral === core.from - # Arel::Table.new(core.from, @engine) - # elsif Arel::Nodes::JoinSource === core.source - # Arel::Nodes::SqlLiteral === core.source.left ? Arel::Table.new(core.source.left, @engine) : core.source.left - # end - table_finder = lambda { |x| - case x - when Arel::Table - x - when Arel::Nodes::SqlLiteral - Arel::Table.new(x, @engine) - when Arel::Nodes::Join - table_finder.call(x.left) - end - } - table_finder.call(core.froms) + def collect_optimizer_hints(o, collector) + collector end - def single_distinct_select_statement?(o) - projections = o.cores.first.projections - p1 = projections.first - projections.size == 1 && - ((p1.respond_to?(:distinct) && p1.distinct) || - p1.respond_to?(:include?) && p1.include?('DISTINCT')) + def visit_Arel_Nodes_WithRecursive(o, collector) + collector << "WITH " + collect_ctes(o.children, collector) end - - def windowed_single_distinct_select_statement?(o) - o.limit && o.offset && single_distinct_select_statement?(o) + + # SQLServer ToSql/Visitor (Additions) + + def visit_Arel_Nodes_SelectStatement_SQLServer_Lock(collector, options = {}) + if select_statement_lock? + collector = visit @select_statement.lock, collector + collector << " " if options[:space] + end + collector end - - def single_distinct_select_everything_statement?(o) - single_distinct_select_statement?(o) && visit(o.cores.first.projections.first).ends_with?(".*") + + def visit_Orders_And_Let_Fetch_Happen(o, collector) + make_Fetch_Possible_And_Deterministic o + if o.orders.any? 
+ collector << " ORDER BY " + len = o.orders.length - 1 + o.orders.each_with_index { |x, i| + collector = visit(x, collector) + collector << ", " unless len == i + } + end + collector end - - def top_one_everything_for_through_join?(o) - single_distinct_select_everything_statement?(o) && - (o.limit && !o.offset) && - join_in_select_statement?(o) + + def visit_Make_Fetch_Happen(o, collector) + o.offset = Nodes::Offset.new(0) if o.limit && !o.offset + collector = visit o.offset, collector if o.offset + collector = visit o.limit, collector if o.limit + collector end - def all_projections_aliased_in_select_statement?(o) - projections = o.cores.first.projections - projections.all? do |x| - visit(x).split(',').all? { |y| y.include?(' AS ') } + def visit_Arel_Nodes_Lateral(o, collector) + collector << "APPLY" + collector << " " + if o.expr.is_a?(Arel::Nodes::SelectStatement) + collector << "(" + visit(o.expr, collector) + collector << ")" + else + visit(o.expr, collector) end end - def function_select_statement?(o) - core = o.cores.first - core.projections.any? { |x| Arel::Nodes::Function === x } + # SQLServer Helpers + + def node_value(node) + return nil unless node + + case node.expr + when NilClass then nil + when Numeric then node.expr + when Arel::Nodes::Unary then node.expr.expr + end end - def eager_limiting_select_statement?(o) - core = o.cores.first - single_distinct_select_statement?(o) && - (o.limit && !o.offset) && - core.groups.empty? && - !single_distinct_select_everything_statement?(o) + def select_statement_lock? + @select_statement&.lock end - def join_in_select_statement?(o) - core = o.cores.first - core.source.right.any? { |x| Arel::Nodes::Join === x } + def make_Fetch_Possible_And_Deterministic(o) + return if o.limit.nil? && o.offset.nil? + return if o.orders.any? + + t = table_From_Statement o + pk = primary_Key_From_Table t + return unless pk + + # Prefer deterministic vs a simple `(SELECT NULL)` expr. + o.orders = [pk.asc] end - def complex_count_sql?(o) + def distinct_One_As_One_Is_So_Not_Fetch(o) core = o.cores.first - core.projections.size == 1 && - Arel::Nodes::Count === core.projections.first && - o.limit && - !join_in_select_statement?(o) + distinct = Nodes::Distinct === core.set_quantifier + oneasone = core.projections.all? { |x| x == ActiveRecord::FinderMethods::ONE_AS_ONE } + limitone = [nil, 0, 1].include? node_value(o.limit) + if distinct && oneasone && limitone && !o.offset + core.projections = [Arel.sql("TOP(1) 1 AS [one]")] + o.limit = nil + end end - - def select_primary_key_sql?(o) + + def table_From_Statement(o) core = o.cores.first - return false if core.projections.size != 1 - p = core.projections.first - t = table_from_select_statement(o) - Arel::Attributes::Attribute === p && t.primary_key && t.primary_key.name == p.name + if Arel::Table === core.from + core.from + elsif Arel::Nodes::SqlLiteral === core.from + Arel::Table.new(core.from) + elsif Arel::Nodes::JoinSource === core.source + (Arel::Nodes::SqlLiteral === core.source.left) ? Arel::Table.new(core.source.left, @engine) : core.source.left.left + end end - def find_and_fix_uncorrelated_joins_in_select_statement(o) - core = o.cores.first - # TODO: [ARel 2.2] Use #from/#source vs. 
#froms - # return if !join_in_select_statement?(o) || core.source.right.size != 2 - # j1 = core.source.right.first - # j2 = core.source.right.second - # return unless Arel::Nodes::OuterJoin === j1 && Arel::Nodes::StringJoin === j2 - # j1_tn = j1.left.name - # j2_tn = j2.left.match(/JOIN \[(.*)\].*ON/).try(:[],1) - # return unless j1_tn == j2_tn - # crltd_tn = "#{j1_tn}_crltd" - # j1.left.table_alias = crltd_tn - # j1.right.expr.left.relation.table_alias = crltd_tn - return if !join_in_select_statement?(o) || !(Arel::Nodes::StringJoin === core.froms) - j1 = core.froms.left - j2 = core.froms.right - return unless Arel::Nodes::OuterJoin === j1 && Arel::Nodes::SqlLiteral === j2 && j2.include?('JOIN ') - j1_tn = j1.right.name - j2_tn = j2.match(/JOIN \[(.*)\].*ON/).try(:[],1) - return unless j1_tn == j2_tn - on_index = j2.index(' ON ') - j2.insert on_index, " AS [#{j2_tn}_crltd]" - j2.sub! "[#{j2_tn}].", "[#{j2_tn}_crltd]." - end - - def rowtable_projections(o) - core = o.cores.first - if windowed_single_distinct_select_statement?(o) && core.groups.blank? - tn = table_from_select_statement(o).name - core.projections.map do |x| - x.dup.tap do |p| - p.sub! 'DISTINCT', '' - p.insert 0, visit(o.limit) if o.limit - p.gsub! /\[?#{tn}\]?\./, '[__rnt].' - p.strip! - end - end - elsif single_distinct_select_statement?(o) - tn = table_from_select_statement(o).name - core.projections.map do |x| - x.dup.tap do |p| - p.sub! 'DISTINCT', "DISTINCT #{visit(o.limit)}".strip if o.limit - p.gsub! /\[?#{tn}\]?\./, '[__rnt].' - p.strip! - end - end - elsif join_in_select_statement?(o) && all_projections_aliased_in_select_statement?(o) - core.projections.map do |x| - Arel.sql visit(x).split(',').map{ |y| y.split(' AS ').last.strip }.join(', ') - end - elsif select_primary_key_sql?(o) - [Arel.sql("[__rnt].#{quote_column_name(core.projections.first.name)}")] - else - [Arel.sql('[__rnt].*')] + def primary_Key_From_Table(t) + return unless t + + primary_keys = @connection.schema_cache.primary_keys(t.name) + column_name = nil + + case primary_keys + when NilClass + column_name = @connection.schema_cache.columns_hash(t.name).first.try(:second).try(:name) + when String + column_name = primary_keys + when Array + candidate_columns = @connection.schema_cache.columns_hash(t.name).slice(*primary_keys).values + candidate_column = candidate_columns.find(&:is_identity?) + candidate_column ||= candidate_columns.first + column_name = candidate_column.try(:name) end + + column_name ? t[column_name] : nil end - def rowtable_orders(o) - core = o.cores.first - if !o.orders.empty? - o.orders - else - t = table_from_select_statement(o) - c = t.primary_key || t.columns.first - [c.asc] - end.uniq + def remote_server_table_name(o) + o.class.engine.with_connection do |connection| + ActiveRecord::ConnectionAdapters::SQLServer::Utils.extract_identifiers( + "#{connection.database_prefix}#{o.name}" + ).quoted + end end - # TODO: We use this for grouping too, maybe make Grouping objects vs SqlLiteral. - def projection_without_expression(projection) - Arel.sql(visit(projection).split(',').map do |x| - x.strip! - x.sub!(/^(COUNT|SUM|MAX|MIN|AVG)\s*(\((.*)\))?/,'\3') - x.sub!(/^DISTINCT\s*/,'') - x.sub!(/TOP\s*\(\d+\)\s*/i,'') - x.strip - end.join(', ')) + # Need to remove ordering from sub-queries unless TOP/OFFSET also used. Otherwise, SQLServer + # returns error "The ORDER BY clause is invalid in views, inline functions, derived tables, + # sub-queries, and common table expressions, unless TOP, OFFSET or FOR XML is also specified." 
+ def remove_invalid_ordering_from_select_statement(node) + return unless Arel::Nodes::SelectStatement === node + + node.orders = [] unless node.offset || node.limit end + def sanitize_as_option_clause(value) + value.gsub(%r{OPTION \s* \( (.+) \)}xi, "\\1") + end end end - end - -Arel::Visitors::VISITORS['sqlserver'] = Arel::Visitors::SQLServer diff --git a/lib/arel_sqlserver.rb b/lib/arel_sqlserver.rb new file mode 100644 index 000000000..e6c7760ac --- /dev/null +++ b/lib/arel_sqlserver.rb @@ -0,0 +1,4 @@ +# frozen_string_literal: true + +require "arel" +require "arel/visitors/sqlserver" diff --git a/test/appveyor/dbsetup.ps1 b/test/appveyor/dbsetup.ps1 new file mode 100644 index 000000000..d5d108432 --- /dev/null +++ b/test/appveyor/dbsetup.ps1 @@ -0,0 +1,27 @@ + +Write-Output "Setting up..." +[reflection.assembly]::LoadWithPartialName("Microsoft.SqlServer.Smo") | Out-Null +[reflection.assembly]::LoadWithPartialName("Microsoft.SqlServer.SqlWmiManagement") | Out-Null + +Write-Output "Setting variables..." +$serverName = $env:COMPUTERNAME +$instanceNames = @('SQL2014') +$smo = 'Microsoft.SqlServer.Management.Smo.' +$wmi = new-object ($smo + 'Wmi.ManagedComputer') + +Write-Output "Configure Instances..." +foreach ($instanceName in $instanceNames) { + Write-Output "Instance $instanceName ..." + Write-Output "Enable TCP/IP and port 1433..." + $uri = "ManagedComputer[@Name='$serverName']/ServerInstance[@Name='$instanceName']/ServerProtocol[@Name='Tcp']" + $tcp = $wmi.GetSmoObject($uri) + $tcp.IsEnabled = $true + foreach ($ipAddress in $Tcp.IPAddresses) { + $ipAddress.IPAddressProperties["TcpDynamicPorts"].Value = "" + $ipAddress.IPAddressProperties["TcpPort"].Value = "1433" + } + $tcp.Alter() +} + +Set-Service SQLBrowser -StartupType Manual +Start-Service SQLBrowser diff --git a/test/appveyor/dbsetup.sql b/test/appveyor/dbsetup.sql new file mode 100644 index 000000000..1619f9e38 --- /dev/null +++ b/test/appveyor/dbsetup.sql @@ -0,0 +1,11 @@ +CREATE DATABASE [activerecord_unittest]; +CREATE DATABASE [activerecord_unittest2]; +GO +CREATE LOGIN [rails] WITH PASSWORD = '', CHECK_POLICY = OFF, DEFAULT_DATABASE = [activerecord_unittest]; +GO +USE [activerecord_unittest]; +CREATE USER [rails] FOR LOGIN [rails]; +GO +EXEC sp_addrolemember N'db_owner', N'rails'; +EXEC master..sp_addsrvrolemember @loginame = N'rails', @rolename = N'sysadmin' +GO diff --git a/test/bin/install-freetds.sh b/test/bin/install-freetds.sh new file mode 100755 index 000000000..755ff928a --- /dev/null +++ b/test/bin/install-freetds.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash + +set -x +set -e + +FREETDS_VERSION=1.00.21 + +wget http://www.freetds.org/files/stable/freetds-$FREETDS_VERSION.tar.gz +tar -xzf freetds-$FREETDS_VERSION.tar.gz +cd freetds-$FREETDS_VERSION +./configure --prefix=/opt/local \ + --with-openssl=/opt/local \ + --with-tdsver=7.3 +make +make install +cd .. +rm -rf freetds-$FREETDS_VERSION +rm freetds-$FREETDS_VERSION.tar.gz diff --git a/test/bin/install-openssl.sh b/test/bin/install-openssl.sh new file mode 100755 index 000000000..9f9851a17 --- /dev/null +++ b/test/bin/install-openssl.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +set -x +set -e + +OPENSSL_VERSION=1.0.2j + +wget https://www.openssl.org/source/openssl-$OPENSSL_VERSION.tar.gz +tar -xzf openssl-$OPENSSL_VERSION.tar.gz +cd openssl-$OPENSSL_VERSION +./config --prefix=/opt/local +make +make install +cd .. 
+rm -rf openssl-$OPENSSL_VERSION +rm openssl-$OPENSSL_VERSION.tar.gz diff --git a/test/bin/setup.sh b/test/bin/setup.sh new file mode 100755 index 000000000..b312d0cf3 --- /dev/null +++ b/test/bin/setup.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +set -x +set -e + +tag=2017-GA + +docker pull metaskills/mssql-server-linux-rails:$tag + +container=$(docker ps -a -q --filter ancestor=metaskills/mssql-server-linux-rails:$tag) +if [[ -z $container ]]; then + docker run -p 1433:1433 -d metaskills/mssql-server-linux-rails:$tag && sleep 10 + exit +fi + +container=$(docker ps -q --filter ancestor=metaskills/mssql-server-linux-rails:$tag) +if [[ -z $container ]]; then + docker start $container && sleep 10 +fi diff --git a/test/cases/aaaa_create_tables_test_sqlserver.rb b/test/cases/aaaa_create_tables_test_sqlserver.rb deleted file mode 100644 index 6aa788455..000000000 --- a/test/cases/aaaa_create_tables_test_sqlserver.rb +++ /dev/null @@ -1,19 +0,0 @@ -# The filename begins with "aaaa" to ensure this is the first test. -require 'cases/sqlserver_helper' - -class AAAACreateTablesTestSqlserver < ActiveRecord::TestCase - - self.use_transactional_fixtures = false - - should 'load activerecord schema then sqlserver specific schema' do - # Core AR. - schema_file = "#{ACTIVERECORD_TEST_ROOT}/schema/schema.rb" - eval(File.read(schema_file)) - assert true - # SQL Server. - sqlserver_specific_schema_file = "#{SQLSERVER_SCHEMA_ROOT}/sqlserver_specific_schema.rb" - eval(File.read(sqlserver_specific_schema_file)) - assert true - end - -end diff --git a/test/cases/active_schema_test_sqlserver.rb b/test/cases/active_schema_test_sqlserver.rb new file mode 100644 index 000000000..dcc87c9d3 --- /dev/null +++ b/test/cases/active_schema_test_sqlserver.rb @@ -0,0 +1,149 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" + +class ActiveSchemaTestSQLServer < ActiveRecord::TestCase + describe "indexes" do + before do + connection.create_table :schema_test_table, force: true, id: false do |t| + t.column :foo, :string, limit: 100 + t.column :state, :string + end + end + + after do + connection.drop_table :schema_test_table + rescue + nil + end + + it "default index" do + assert_queries_match("CREATE INDEX [index_schema_test_table_on_foo] ON [schema_test_table] ([foo])") do + connection.add_index :schema_test_table, "foo" + end + end + + it "unique index" do + assert_queries_match("CREATE UNIQUE INDEX [index_schema_test_table_on_foo] ON [schema_test_table] ([foo])") do + connection.add_index :schema_test_table, "foo", unique: true + end + end + + it "where condition on index" do + assert_queries_match("CREATE INDEX [index_schema_test_table_on_foo] ON [schema_test_table] ([foo]) WHERE state = 'active'") do + connection.add_index :schema_test_table, "foo", where: "state = 'active'" + end + end + + it "if index does not exist" do + assert_queries_match("IF NOT EXISTS (SELECT name FROM sysindexes WHERE name = 'index_schema_test_table_on_foo') " \ + "CREATE INDEX [index_schema_test_table_on_foo] ON [schema_test_table] ([foo])") do + connection.add_index :schema_test_table, "foo", if_not_exists: true + end + end + + it "clustered index" do + assert_queries_match("CREATE CLUSTERED INDEX [index_schema_test_table_on_foo] ON [schema_test_table] ([foo])") do + connection.add_index :schema_test_table, "foo", type: :clustered + end + end + + it "nonclustered index" do + assert_queries_match("CREATE NONCLUSTERED INDEX [index_schema_test_table_on_foo] ON [schema_test_table] ([foo])") do + connection.add_index 
:schema_test_table, "foo", type: :nonclustered + end + end + end + + describe "collation" do + it "create column with NOT NULL and COLLATE" do + assert_nothing_raised do + connection.create_table :not_null_with_collation_table, force: true, id: false do |t| + t.text :not_null_text_with_collation, null: false, collation: "Latin1_General_CS_AS" + end + end + ensure + begin + connection.drop_table :not_null_with_collation_table + rescue + nil + end + end + end + + describe "datetimeoffset precision" do + it "valid precisions are correct" do + assert_nothing_raised do + connection.create_table :datetimeoffset_precisions do |t| + t.datetimeoffset :precision_default + t.datetimeoffset :precision_5, precision: 5 + t.datetimeoffset :precision_7, precision: 7 + end + end + + columns = connection.columns("datetimeoffset_precisions") + + assert_equal columns.find { |column| column.name == "precision_default" }.precision, 7 + assert_equal columns.find { |column| column.name == "precision_5" }.precision, 5 + assert_equal columns.find { |column| column.name == "precision_7" }.precision, 7 + ensure + begin + connection.drop_table :datetimeoffset_precisions + rescue + nil + end + end + + it "invalid precision raises exception" do + assert_raise(ActiveRecord::ActiveRecordError) do + connection.create_table :datetimeoffset_precisions do |t| + t.datetimeoffset :precision_8, precision: 8 + end + end + ensure + begin + connection.drop_table :datetimeoffset_precisions + rescue + nil + end + end + end + + describe "time precision" do + it "valid precisions are correct" do + assert_nothing_raised do + connection.create_table :time_precisions do |t| + t.time :precision_default + t.time :precision_5, precision: 5 + t.time :precision_7, precision: 7 + end + end + + columns = connection.columns("time_precisions") + + assert_equal columns.find { |column| column.name == "precision_default" }.precision, 7 + assert_equal columns.find { |column| column.name == "precision_5" }.precision, 5 + assert_equal columns.find { |column| column.name == "precision_7" }.precision, 7 + ensure + begin + connection.drop_table :time_precisions + rescue + nil + end + end + + it "invalid precision raises exception" do + assert_raise(ActiveRecord::ActiveRecordError) do + connection.create_table :time_precisions do |t| + t.time :precision_8, precision: 8 + end + end + ensure + begin + connection.drop_table :time_precisions + rescue + nil + end + end + end +end diff --git a/test/cases/adapter_test_sqlserver.rb b/test/cases/adapter_test_sqlserver.rb index c307798bb..6d5c87d61 100644 --- a/test/cases/adapter_test_sqlserver.rb +++ b/test/cases/adapter_test_sqlserver.rb @@ -1,792 +1,691 @@ -require 'cases/sqlserver_helper' -require 'models/task' -require 'models/reply' -require 'models/joke' -require 'models/subscriber' -require 'models/minimalistic' -require 'models/post' - -class AdapterTestSqlserver < ActiveRecord::TestCase - - fixtures :tasks, :posts - - def setup - @connection = ActiveRecord::Base.connection - @basic_insert_sql = "INSERT INTO [funny_jokes] ([name]) VALUES('Knock knock')" - @basic_update_sql = "UPDATE [customers] SET [address_street] = NULL WHERE [id] = 2" - @basic_select_sql = "SELECT * FROM [customers] WHERE ([customers].[id] = 1)" +# frozen_string_literal: true + +require "cases/helper_sqlserver" +require "models/topic" +require "models/task" +require "models/post" +require "models/subscriber" +require "models/minimalistic" +require "models/college" +require "models/dog" +require "models/other_dog" +require 
"models/discount" + +class AdapterTestSQLServer < ActiveRecord::TestCase + fixtures :tasks + + let(:arunit_connection) { Topic.lease_connection } + let(:arunit2_connection) { College.lease_connection } + let(:arunit_database) { arunit_connection.pool.db_config.database } + let(:arunit2_database) { arunit2_connection.pool.db_config.database } + + let(:basic_insert_sql) { "INSERT INTO [funny_jokes] ([name]) VALUES('Knock knock')" } + let(:basic_merge_sql) { "MERGE INTO [ships] WITH (UPDLOCK, HOLDLOCK) AS target USING ( SELECT * FROM ( SELECT [id], [name], ROW_NUMBER() OVER ( PARTITION BY [id] ORDER BY [id] DESC ) AS rn_0 FROM ( VALUES (101, N'RSS Sir David Attenborough') ) AS t1 ([id], [name]) ) AS ranked_source WHERE rn_0 = 1 ) AS source ON (target.[id] = source.[id]) WHEN MATCHED THEN UPDATE SET target.[name] = source.[name]" } + let(:basic_update_sql) { "UPDATE [customers] SET [address_street] = NULL WHERE [id] = 2" } + let(:basic_select_sql) { "SELECT * FROM [customers] WHERE ([customers].[id] = 1)" } + + it "has basic and non-sensitive information in the adapters inspect method" do + string = connection.inspect + _(string).must_match %r{ActiveRecord::ConnectionAdapters::SQLServerAdapter} + _(string).wont_match %r{host} + _(string).wont_match %r{password} + _(string).wont_match %r{username} + _(string).wont_match %r{port} end - - context 'For abstract behavior' do - - should 'have a 128 max #table_alias_length' do - assert @connection.table_alias_length <= 128 - end - - should 'raise invalid statement error' do - assert_raise(ActiveRecord::StatementInvalid) { Topic.connection.update("UPDATE XXX") } - end - - should 'be our adapter_name' do - assert_equal 'SQLServer', @connection.adapter_name - end - - should 'include version in inspect' do - assert_match(/version\: \d.\d/,@connection.inspect) - end - - should 'include database product level in inspect' do - assert_match(/product_level\: "\w+/, @connection.inspect) - end - - should 'include database product version in inspect' do - assert_match(/product_version\: "\d+/, @connection.inspect) - end - - should 'include database edition in inspect' do - assert_match(/edition\: "\w+/, @connection.inspect) - end - - should 'set database product level' do - assert_match(/\w+/, @connection.product_level) - end - - should 'set database product version' do - assert_match(/\d+/, @connection.product_version) - end - - should 'set database edition' do - assert_match(/\w+/, @connection.edition) - end - - should 'support migrations' do - assert @connection.supports_migrations? - end - - should 'support DDL in transactions' do - assert @connection.supports_ddl_transactions? - end - - should 'allow owner table name prefixs like dbo. to still allow table_exists? to return true' do + + it "has a 128 max #table_alias_length" do + assert connection.table_alias_length <= 128 + end + + it "raises invalid statement error for bad SQL" do + assert_raise(ActiveRecord::StatementInvalid) { Topic.lease_connection.update("UPDATE XXX") } + end + + it "is has our adapter_name" do + assert_equal "SQLServer", connection.adapter_name + end + + it "support DDL in transactions" do + assert connection.supports_ddl_transactions? + end + + it "table exists works if table name prefixed by schema and owner" do + assert_equal "topics", Topic.table_name + assert Topic.table_exists? + + # Test when owner included in table name. + Topic.table_name = "dbo.topics" + assert Topic.table_exists?, "Topics table name of 'dbo.topics' should return true for exists." 
+ + # Test when database and owner included in table name. + Topic.table_name = "#{arunit_database}.dbo.topics" + assert Topic.table_exists?, "Topics table name of '[DATABASE].dbo.topics' should return true for exists." + ensure + Topic.table_name = "topics" + end + + it "test table existence across database schemas" do + arunit_connection = Topic.lease_connection + arunit2_connection = College.lease_connection + + arunit_database = arunit_connection.pool.db_config.database + arunit2_database = arunit2_connection.pool.db_config.database + + # Assert that connections use different default databases schemas. + assert_not_equal arunit_database, arunit2_database + + # Assert that the Topics table exists when using the Topics connection. + assert arunit_connection.table_exists?("topics"), "Topics table exists using table name" + assert arunit_connection.table_exists?("dbo.topics"), "Topics table exists using owner and table name" + assert arunit_connection.table_exists?("#{arunit_database}.dbo.topics"), "Topics table exists using database, owner and table name" + + # Assert that the Colleges table exists when using the Colleges connection. + assert arunit2_connection.table_exists?("colleges"), "College table exists using table name" + assert arunit2_connection.table_exists?("dbo.colleges"), "College table exists using owner and table name" + assert arunit2_connection.table_exists?("#{arunit2_database}.dbo.colleges"), "College table exists using database, owner and table name" + + # Assert that the tables exist when using each others connection. + assert arunit_connection.table_exists?("#{arunit2_database}.dbo.colleges"), "Colleges table exists using Topics connection" + assert arunit2_connection.table_exists?("#{arunit_database}.dbo.topics"), "Topics table exists using Colleges connection" + end + + it "return true to insert sql query for inserts only" do + assert connection.send(:insert_sql?, "INSERT...") + assert connection.send(:insert_sql?, "EXEC sp_executesql N'INSERT INTO [fk_test_has_fks] ([fk_id]) VALUES (@0); SELECT CAST(SCOPE_IDENTITY() AS bigint) AS Ident', N'@0 int', @0 = 0") + assert !connection.send(:insert_sql?, "UPDATE...") + assert !connection.send(:insert_sql?, "SELECT...") + end + + it "return unquoted table name object from basic INSERT UPDATE and SELECT statements" do + assert_equal "funny_jokes", connection.send(:get_table_name, basic_insert_sql) + assert_equal "ships", connection.send(:get_table_name, basic_merge_sql) + assert_equal "customers", connection.send(:get_table_name, basic_update_sql) + assert_equal "customers", connection.send(:get_table_name, basic_select_sql) + end + + it "test bad connection" do + assert_raise ActiveRecord::NoDatabaseError do + db_config = ActiveRecord::Base.configurations.configs_for(env_name: "arunit", name: "primary") + configuration = db_config.configuration_hash.merge(database: "nonexistent_activerecord_unittest") + connection = ActiveRecord::ConnectionAdapters::SQLServerAdapter.new(configuration) + connection.exec_query("SELECT 1") + end + end + + it "test database exists returns false if database does not exist" do + db_config = ActiveRecord::Base.configurations.configs_for(env_name: "arunit", name: "primary") + configuration = db_config.configuration_hash.merge(database: "nonexistent_activerecord_unittest") + assert_not ActiveRecord::ConnectionAdapters::SQLServerAdapter.database_exists?(configuration), + "expected database #{configuration[:database]} to not exist" + end + + it "test database exists returns true when the database 
exists" do + db_config = ActiveRecord::Base.configurations.configs_for(env_name: "arunit", name: "primary") + assert ActiveRecord::ConnectionAdapters::SQLServerAdapter.database_exists?(db_config.configuration_hash), + "expected database #{db_config.database} to exist" + end + + it "test primary key violation" do + Post.create!(id: 0, title: "Setup", body: "Create post with primary key of zero") + + assert_raise ActiveRecord::RecordNotUnique do + Post.create!(id: 0, title: "Test", body: "Try to create another post with primary key of zero") + end + end + + describe "with different language" do + before do + @default_language = connection.user_options_language + end + + after do begin - assert_equal 'tasks', Task.table_name - assert Task.table_exists? - Task.table_name = 'dbo.tasks' - assert Task.table_exists?, 'Tasks table name of dbo.tasks should return true for exists.' - ensure - Task.table_name = 'tasks' + connection.execute("SET LANGUAGE #{@default_language}") + rescue + nil end + connection.send :initialize_dateformatter end - - context 'for database version' do - - setup do - @version_regexp = ActiveRecord::ConnectionAdapters::SQLServerAdapter::DATABASE_VERSION_REGEXP - @supported_version = ActiveRecord::ConnectionAdapters::SQLServerAdapter::SUPPORTED_VERSIONS - @sqlserver_2005_string = "Microsoft SQL Server 2005 - 9.00.3215.00 (Intel X86)" - @sqlserver_2008_string = "Microsoft SQL Server 2008 (RTM) - 10.0.1600.22 (Intel X86)" - @sqlserver_2011_string1 = %|Microsoft SQL Server "Denali" (CTP1) - 11.0.1103.9 (Intel X86) Sep 24 2010 22:02:43 Copyright (c) Microsoft Corporation Enterprise Evaluation Edition on Windows NT 6.0 (Build 6002: Service Pack 2)| - end - - should 'return a string from #database_version that matches class regexp' do - assert_match @version_regexp, @connection.database_version - end unless sqlserver_azure? - - should 'return a 4 digit year fixnum for #database_year' do - assert_instance_of Fixnum, @connection.database_year - assert_contains @supported_version, @connection.database_year - end - - should 'return a code name if year not available' do - assert_equal "Denali", @version_regexp.match(@sqlserver_2011_string1)[1] - end - - end - - context 'for Utils.unqualify_table_name and Utils.unqualify_db_name' do - - setup do - @expected_table_name = 'baz' - @expected_db_name = 'foo' - @first_second_table_names = ['[baz]','baz','[bar].[baz]','bar.baz'] - @third_table_names = ['[foo].[bar].[baz]','foo.bar.baz'] - @qualifed_table_names = @first_second_table_names + @third_table_names - end - - should 'return clean table_name from Utils.unqualify_table_name' do - @qualifed_table_names.each do |qtn| - assert_equal @expected_table_name, - ActiveRecord::ConnectionAdapters::Sqlserver::Utils.unqualify_table_name(qtn), - "This qualifed_table_name #{qtn} did not unqualify correctly." - end - end - - should 'return nil from Utils.unqualify_db_name when table_name is less than 2 qualified' do - @first_second_table_names.each do |qtn| - assert_equal nil, ActiveRecord::ConnectionAdapters::Sqlserver::Utils.unqualify_db_name(qtn), - "This qualifed_table_name #{qtn} did not return nil." - end - end - - should 'return clean db_name from Utils.unqualify_db_name when table is thrid level qualified' do - @third_table_names.each do |qtn| - assert_equal @expected_db_name, - ActiveRecord::ConnectionAdapters::Sqlserver::Utils.unqualify_db_name(qtn), - "This qualifed_table_name #{qtn} did not unqualify the db_name correctly." - end - end - - end - - should 'return true to #insert_sql? 
for inserts only' do - assert @connection.send(:insert_sql?,'INSERT...') - assert @connection.send(:insert_sql?, "EXEC sp_executesql N'INSERT INTO [fk_test_has_fks] ([fk_id]) VALUES (@0); SELECT CAST(SCOPE_IDENTITY() AS bigint) AS Ident', N'@0 int', @0 = 0") - assert !@connection.send(:insert_sql?,'UPDATE...') - assert !@connection.send(:insert_sql?,'SELECT...') - end - - context 'for #get_table_name' do - - should 'return quoted table name from basic INSERT, UPDATE and SELECT statements' do - assert_equal '[funny_jokes]', @connection.send(:get_table_name,@basic_insert_sql) - assert_equal '[customers]', @connection.send(:get_table_name,@basic_update_sql) - assert_equal '[customers]', @connection.send(:get_table_name,@basic_select_sql) + + it "memos users dateformat" do + begin + connection.execute("SET LANGUAGE us_english") + rescue + nil end - + dateformat = connection.instance_variable_get(:@database_dateformat) + assert_equal "mdy", dateformat end - - context 'with different language' do - - setup do - @default_language = @connection.user_options_language - end - - teardown do - @connection.execute("SET LANGUAGE #{@default_language}") rescue nil - @connection.send :initialize_dateformatter - end - - should 'memoize users dateformat' do - @connection.execute("SET LANGUAGE us_english") rescue nil - dateformat = @connection.instance_variable_get(:@database_dateformat) - assert_equal 'mdy', dateformat - end - - should 'have a dateformatter' do - assert Date::DATE_FORMATS[:_sqlserver_dateformat] - assert Time::DATE_FORMATS[:_sqlserver_dateformat] - end - - should 'do a date insertion when language is german' do - @connection.execute("SET LANGUAGE deutsch") - @connection.send :initialize_dateformatter - assert_nothing_raised do - Task.create(:starting => Time.utc(2000, 1, 31, 5, 42, 0), :ending => Date.new(2006, 12, 31)) - end - end - - end - - context 'testing #enable_default_unicode_types configuration' do - - should 'use non-unicode types when set to false' do - with_enable_default_unicode_types(false) do - assert_equal 'varchar', @connection.native_string_database_type - assert_equal 'varchar(max)', @connection.native_text_database_type - end - end - - should 'use unicode types when set to true' do - with_enable_default_unicode_types(true) do - assert_equal 'nvarchar', @connection.native_string_database_type - assert_equal 'nvarchar(max)', @connection.native_text_database_type - end - end - + + it "has a dateformatter" do + assert Date::DATE_FORMATS[:_sqlserver_dateformat] + assert Time::DATE_FORMATS[:_sqlserver_dateformat] end - - context 'testing #lowercase_schema_reflection' do - setup do - UpperTestDefault.delete_all - UpperTestDefault.create :COLUMN1 => 'Got a minute?', :COLUMN2 => 419 - UpperTestDefault.create :COLUMN1 => 'Favorite number?', :COLUMN2 => 69 - end - - teardown do - @connection.lowercase_schema_reflection = false + it "does a datetime insertion when language is german" do + connection.execute("SET LANGUAGE deutsch") + connection.send :initialize_dateformatter + assert_nothing_raised do + starting = Time.utc(2000, 1, 31, 5, 42, 0) + ending = Time.new(2006, 12, 31) + Task.create! 
starting: starting, ending: ending end + end + end - should 'not lowercase schema reflection by default' do - assert UpperTestDefault.columns_hash['COLUMN1'] - assert_equal 'Got a minute?', UpperTestDefault.first.COLUMN1 - assert_equal 'Favorite number?', UpperTestDefault.last.COLUMN1 - assert UpperTestDefault.columns_hash['COLUMN2'] - end - - should 'lowercase schema reflection when set' do - @connection.lowercase_schema_reflection = true - UpperTestLowered.reset_column_information - assert UpperTestLowered.columns_hash['column1'] - assert_equal 'Got a minute?', UpperTestLowered.first.column1 - assert_equal 'Favorite number?', UpperTestLowered.last.column1 - assert UpperTestLowered.columns_hash['column2'] - end + describe "testing #lowercase_schema_reflection" do + before do + SSTestUpper.delete_all + SSTestUpper.create COLUMN1: "Got a minute?", COLUMN2: 419 + SSTestUpper.create COLUMN1: "Favorite number?", COLUMN2: 69 + end + after do + connection.lowercase_schema_reflection = false end - - end - - context 'For chronic data types' do - - context 'with a usec' do - - setup do - @time = Time.now - @db_datetime_003 = '2012-11-08 10:24:36.003' - @db_datetime_123 = '2012-11-08 10:24:36.123' - @all_datetimes = [@db_datetime_003, @db_datetime_123] - @all_datetimes.each do |datetime| - @connection.execute("INSERT INTO [sql_server_chronics] ([datetime]) VALUES('#{datetime}')") - end - end - - teardown do - @all_datetimes.each do |datetime| - @connection.execute("DELETE FROM [sql_server_chronics] WHERE [datetime] = '#{datetime}'") - end - end - - context 'finding existing DB objects' do - - should 'find 003 millisecond in the DB with before and after casting' do - existing_003 = SqlServerChronic.find_by_datetime!(@db_datetime_003) - assert_equal @db_datetime_003, existing_003.datetime_before_type_cast if existing_003.datetime_before_type_cast.is_a?(String) - assert_equal 3000, existing_003.datetime.usec, 'A 003 millisecond in SQL Server is 3000 microseconds' - end - - should 'find 123 millisecond in the DB with before and after casting' do - existing_123 = SqlServerChronic.find_by_datetime!(@db_datetime_123) - assert_equal @db_datetime_123, existing_123.datetime_before_type_cast if existing_123.datetime_before_type_cast.is_a?(String) - assert_equal 123000, existing_123.datetime.usec, 'A 123 millisecond in SQL Server is 123000 microseconds' - end - + + it "not lowercase schema reflection by default" do + assert SSTestUpper.columns_hash["COLUMN1"] + assert_equal "Got a minute?", SSTestUpper.first.COLUMN1 + assert_equal "Favorite number?", SSTestUpper.last.COLUMN1 + assert SSTestUpper.columns_hash["COLUMN2"] + end + + it "lowercase schema reflection when set" do + connection.lowercase_schema_reflection = true + assert SSTestUppered.columns_hash["column1"] + assert_equal "Got a minute?", SSTestUppered.first.column1 + assert_equal "Favorite number?", SSTestUppered.last.column1 + assert SSTestUppered.columns_hash["column2"] + end + + it "destroys model with no associations" do + connection.lowercase_schema_reflection = true + + assert_nothing_raised do + discount = Discount.create! + discount.destroy! 
end - - context 'saving new datetime objects' do - - should 'truncate 123456 usec to just 123 in the DB cast back to 123000' do - Time.any_instance.stubs :iso8601 => "2011-07-26T12:29:01.123-04:00" - saved = SqlServerChronic.create!(:datetime => @time).reload - saved.reload - assert_equal '123', saved.datetime_before_type_cast.split('.')[1] if saved.datetime_before_type_cast.is_a?(String) - assert_equal 123000, saved.datetime.usec - end - + end + + it "destroys model with association" do + connection.lowercase_schema_reflection = true + + assert_nothing_raised do + post = Post.create!(title: "Setup", body: "Record to be deleted") + post.destroy! end - end - end - - context 'For identity inserts' do - - setup do + + describe "identity inserts" do + before do @identity_insert_sql = "INSERT INTO [funny_jokes] ([id],[name]) VALUES(420,'Knock knock')" @identity_insert_sql_unquoted = "INSERT INTO funny_jokes (id, name) VALUES(420, 'Knock knock')" @identity_insert_sql_unordered = "INSERT INTO [funny_jokes] ([name],[id]) VALUES('Knock knock',420)" @identity_insert_sql_sp = "EXEC sp_executesql N'INSERT INTO [funny_jokes] ([id],[name]) VALUES (@0, @1)', N'@0 int, @1 nvarchar(255)', @0 = 420, @1 = N'Knock knock'" - @identity_insert_sql_unquoted_sp = "EXEC sp_executesql N'INSERT INTO [funny_jokes] (id, name) VALUES (@0, @1)', N'@0 int, @1 nvarchar(255)', @0 = 420, @1 = N'Knock knock'" + @identity_insert_sql_unquoted_sp = "EXEC sp_executesql N'INSERT INTO funny_jokes (id, name) VALUES (@0, @1)', N'@0 int, @1 nvarchar(255)', @0 = 420, @1 = N'Knock knock'" @identity_insert_sql_unordered_sp = "EXEC sp_executesql N'INSERT INTO [funny_jokes] ([name],[id]) VALUES (@0, @1)', N'@0 nvarchar(255), @1 int', @0 = N'Knock knock', @1 = 420" + + @identity_merge_sql = "MERGE INTO [ships] WITH (UPDLOCK, HOLDLOCK) AS target USING ( SELECT * FROM ( SELECT [id], [name], ROW_NUMBER() OVER ( PARTITION BY [id] ORDER BY [id] DESC ) AS rn_0 FROM ( VALUES (101, N'RSS Sir David Attenborough') ) AS t1 ([id], [name]) ) AS ranked_source WHERE rn_0 = 1 ) AS source ON (target.[id] = source.[id]) WHEN MATCHED THEN UPDATE SET target.[name] = source.[name] WHEN NOT MATCHED BY TARGET THEN INSERT ([id], [name]) VALUES (source.[id], source.[name]) OUTPUT INSERTED.[id]" + @identity_merge_sql_unquoted = "MERGE INTO ships WITH (UPDLOCK, HOLDLOCK) AS target USING ( SELECT * FROM ( SELECT id, name, ROW_NUMBER() OVER ( PARTITION BY id ORDER BY id DESC ) AS rn_0 FROM ( VALUES (101, N'RSS Sir David Attenborough') ) AS t1 (id, name) ) AS ranked_source WHERE rn_0 = 1 ) AS source ON (target.id = source.id) WHEN MATCHED THEN UPDATE SET target.name = source.name WHEN NOT MATCHED BY TARGET THEN INSERT (id, name) VALUES (source.id, source.name) OUTPUT INSERTED.id" + @identity_merge_sql_unordered = "MERGE INTO [ships] WITH (UPDLOCK, HOLDLOCK) AS target USING ( SELECT * FROM ( SELECT [name], [id], ROW_NUMBER() OVER ( PARTITION BY [id] ORDER BY [id] DESC ) AS rn_0 FROM ( VALUES (101, N'RSS Sir David Attenborough') ) AS t1 ([name], [id]) ) AS ranked_source WHERE rn_0 = 1 ) AS source ON (target.[id] = source.[id]) WHEN MATCHED THEN UPDATE SET target.[name] = source.[name] WHEN NOT MATCHED BY TARGET THEN INSERT ([name], [id]) VALUES (source.[name], source.[id]) OUTPUT INSERTED.[id]" + + @identity_insert_sql_non_dbo = "INSERT INTO [test].[aliens] ([id],[name]) VALUES(420,'Mork')" + @identity_insert_sql_non_dbo_unquoted = "INSERT INTO test.aliens ([id],[name]) VALUES(420,'Mork')" + @identity_insert_sql_non_dbo_unordered = "INSERT INTO [test].[aliens] 
([name],[id]) VALUES('Mork',420)" + @identity_insert_sql_non_dbo_sp = "EXEC sp_executesql N'INSERT INTO [test].[aliens] ([id],[name]) VALUES (@0, @1)', N'@0 int, @1 nvarchar(255)', @0 = 420, @1 = N'Mork'" + @identity_insert_sql_non_dbo_unquoted_sp = "EXEC sp_executesql N'INSERT INTO test.aliens (id, name) VALUES (@0, @1)', N'@0 int, @1 nvarchar(255)', @0 = 420, @1 = N'Mork'" + @identity_insert_sql_non_dbo_unordered_sp = "EXEC sp_executesql N'INSERT INTO [test].[aliens] ([name],[id]) VALUES (@0, @1)', N'@0 nvarchar(255), @1 int', @0 = N'Mork', @1 = 420" + + @non_identity_insert_sql_cross_database = "INSERT INTO #{arunit2_database}.dbo.dogs SELECT * FROM #{arunit_database}.dbo.dogs" + @identity_insert_sql_cross_database = "INSERT INTO #{arunit2_database}.dbo.dogs(id) SELECT id FROM #{arunit_database}.dbo.dogs" + end + + it "return quoted table_name to #query_requires_identity_insert? when INSERT sql contains id column" do + assert_equal "[funny_jokes]", connection.send(:query_requires_identity_insert?, @identity_insert_sql) + assert_equal "[funny_jokes]", connection.send(:query_requires_identity_insert?, @identity_insert_sql_unquoted) + assert_equal "[funny_jokes]", connection.send(:query_requires_identity_insert?, @identity_insert_sql_unordered) + assert_equal "[funny_jokes]", connection.send(:query_requires_identity_insert?, @identity_insert_sql_sp) + assert_equal "[funny_jokes]", connection.send(:query_requires_identity_insert?, @identity_insert_sql_unquoted_sp) + assert_equal "[funny_jokes]", connection.send(:query_requires_identity_insert?, @identity_insert_sql_unordered_sp) + + assert_equal "[ships]", connection.send(:query_requires_identity_insert?, @identity_merge_sql) + assert_equal "[ships]", connection.send(:query_requires_identity_insert?, @identity_merge_sql_unquoted) + assert_equal "[ships]", connection.send(:query_requires_identity_insert?, @identity_merge_sql_unordered) + + assert_equal "[test].[aliens]", connection.send(:query_requires_identity_insert?, @identity_insert_sql_non_dbo) + assert_equal "[test].[aliens]", connection.send(:query_requires_identity_insert?, @identity_insert_sql_non_dbo_unquoted) + assert_equal "[test].[aliens]", connection.send(:query_requires_identity_insert?, @identity_insert_sql_non_dbo_unordered) + assert_equal "[test].[aliens]", connection.send(:query_requires_identity_insert?, @identity_insert_sql_non_dbo_sp) + assert_equal "[test].[aliens]", connection.send(:query_requires_identity_insert?, @identity_insert_sql_non_dbo_unquoted_sp) + assert_equal "[test].[aliens]", connection.send(:query_requires_identity_insert?, @identity_insert_sql_non_dbo_unordered_sp) + + assert_equal "[#{arunit2_database}].[dbo].[dogs]", connection.send(:query_requires_identity_insert?, @identity_insert_sql_cross_database) end - - should 'return quoted table_name to #query_requires_identity_insert? 
when INSERT sql contains id column' do - assert_equal '[funny_jokes]', @connection.send(:query_requires_identity_insert?,@identity_insert_sql) - assert_equal '[funny_jokes]', @connection.send(:query_requires_identity_insert?,@identity_insert_sql_unquoted) - assert_equal '[funny_jokes]', @connection.send(:query_requires_identity_insert?,@identity_insert_sql_unordered) - assert_equal '[funny_jokes]', @connection.send(:query_requires_identity_insert?,@identity_insert_sql_sp) - assert_equal '[funny_jokes]', @connection.send(:query_requires_identity_insert?,@identity_insert_sql_unquoted_sp) - assert_equal '[funny_jokes]', @connection.send(:query_requires_identity_insert?,@identity_insert_sql_unordered_sp) - end - - should 'return false to #query_requires_identity_insert? for normal SQL' do - [@basic_insert_sql, @basic_update_sql, @basic_select_sql].each do |sql| - assert !@connection.send(:query_requires_identity_insert?,sql), "SQL was #{sql}" + + it "return false to #query_requires_identity_insert? for normal SQL" do + [basic_insert_sql, basic_merge_sql, basic_update_sql, basic_select_sql, @non_identity_insert_sql_cross_database].each do |sql| + assert !connection.send(:query_requires_identity_insert?, sql), "SQL was #{sql}" end end - - should 'find identity column using #identity_column' do - joke_id_column = Joke.columns.detect { |c| c.name == 'id' } - assert_equal joke_id_column.name, @connection.send(:identity_column,Joke.table_name).name - assert_equal joke_id_column.sql_type, @connection.send(:identity_column,Joke.table_name).sql_type - end - - should 'return nil when calling #identity_column for a table_name with no identity' do - assert_nil @connection.send(:identity_column,Subscriber.table_name) - end unless sqlserver_azure? - - should 'be able to disable referential integrity' do - Minimalistic.delete_all - @connection.send :set_identity_insert, Minimalistic.table_name, false - @connection.execute_procedure :sp_MSforeachtable, 'ALTER TABLE ? CHECK CONSTRAINT ALL' - o = Minimalistic.new - o.id = 420 - o.save! 
- end - + + it "find identity column" do + task_id_column = Task.columns_hash["id"] + assert_equal task_id_column.name, connection.send(:identity_columns, Task.table_name).first.name + assert_equal task_id_column.sql_type, connection.send(:identity_columns, Task.table_name).first.sql_type + end + + it "find identity column cross database" do + id_column = Dog.columns_hash["id"] + assert_equal id_column.name, arunit2_connection.send(:identity_columns, Dog.table_name).first.name + assert_equal id_column.sql_type, arunit2_connection.send(:identity_columns, Dog.table_name).first.sql_type + + id_column = OtherDog.columns_hash["id"] + assert_equal id_column.name, arunit_connection.send(:identity_columns, OtherDog.table_name).first.name + assert_equal id_column.sql_type, arunit_connection.send(:identity_columns, OtherDog.table_name).first.sql_type + end + + it "return an empty array when calling #identity_columns for a table_name with no identity" do + _(connection.send(:identity_columns, Subscriber.table_name)).must_equal [] + end end - - context 'For Quoting' do - should 'return 1 for #quoted_true' do - assert_equal '1', @connection.quoted_true + describe "quoting" do + it "return 1 for #quoted_true" do + assert_equal "1", connection.quoted_true end - - should 'return 0 for #quoted_false' do - assert_equal '0', @connection.quoted_false + + it "return 0 for #quoted_false" do + assert_equal "0", connection.quoted_false end - - should 'not escape backslash characters like abstract adapter' do + + it "not escape backslash characters like abstract adapter" do string_with_backslashs = "\\n" - assert_equal string_with_backslashs, @connection.quote_string(string_with_backslashs) + assert_equal string_with_backslashs, connection.quote_string(string_with_backslashs) end - - should 'quote column names with brackets' do - assert_equal '[foo]', @connection.quote_column_name(:foo) - assert_equal '[foo]', @connection.quote_column_name('foo') - assert_equal '[foo].[bar]', @connection.quote_column_name('foo.bar') + + it "quote column names with brackets" do + assert_equal "[foo]", connection.quote_column_name(:foo) + assert_equal "[foo]", connection.quote_column_name("foo") + assert_equal "[foo].[bar]", connection.quote_column_name("foo.bar") end - - should 'not quote already quoted column names with brackets' do - assert_equal '[foo]', @connection.quote_column_name('[foo]') - assert_equal '[foo].[bar]', @connection.quote_column_name('[foo].[bar]') + + it "not quote already quoted column names with brackets" do + assert_equal "[foo]", connection.quote_column_name("[foo]") + assert_equal "[foo].[bar]", connection.quote_column_name("[foo].[bar]") end - - should 'quote table names like columns' do - assert_equal '[foo].[bar]', @connection.quote_column_name('foo.bar') - assert_equal '[foo].[bar].[baz]', @connection.quote_column_name('foo.bar.baz') + + it "quote table names like columns" do + assert_equal "[foo].[bar]", connection.quote_column_name("foo.bar") + assert_equal "[foo].[bar].[baz]", connection.quote_column_name("foo.bar.baz") end - context "#quote" do + it "surround string with national prefix" do + assert_equal "N'foo'", connection.quote("foo") + end - context "string and multibyte values" do + it "escape all single quotes by repeating them" do + assert_equal "N'''quotation''s'''", connection.quote("'quotation's'") + end + end - context "on an activerecord :integer column" do + describe "disabling referential integrity" do + before do + connection.disable_referential_integrity { + SSTestHasPk.delete_all 
+ SSTestHasFk.delete_all + } + @parent = SSTestHasPk.create! + @member = SSTestHasFk.create!(fk_id: @parent.id) + end - setup do - @column = Post.columns_hash['id'] - end + it "NOT ALLOW by default the deletion of a referenced parent" do + SSTestHasPk.lease_connection.disable_referential_integrity {} + assert_raise(ActiveRecord::StatementInvalid) { @parent.destroy } + end - should "return 0 for empty string" do - assert_equal '0', @connection.quote('', @column) - end + it "ALLOW deletion of referenced parent using #disable_referential_integrity block" do + assert_difference("SSTestHasPk.count", -1) do + SSTestHasPk.lease_connection.disable_referential_integrity { @parent.destroy } + end + end - end + it "again NOT ALLOW deletion of referenced parent after #disable_referential_integrity block" do + assert_raise(ActiveRecord::StatementInvalid) do + SSTestHasPk.lease_connection.disable_referential_integrity {} + @parent.destroy + end + end - context "on an activerecord :string column or with any value" do + it "not disable referential integrity for the same table twice" do + tables = SSTestHasPk.lease_connection.tables_with_referential_integrity + assert_equal tables.size, tables.uniq.size + end + end - should "surround it when N'...'" do - assert_equal "N'foo'", @connection.quote("foo") - end + describe "database statements" do + it "run the database consistency checker 'user_options' command" do + skip "on azure" if connection_sqlserver_azure? + keys = [:textsize, :language, :isolation_level, :dateformat] + user_options = connection.user_options + keys.each do |key| + msg = "Expected key:#{key} in user_options:#{user_options.inspect}" + assert user_options.key?(key), msg + end + end - should "escape all single quotes by repeating them" do - assert_equal "N'''quotation''s'''", @connection.quote("'quotation's'") - end + it "return a underscored key hash with indifferent access of the results" do + skip "on azure" if connection_sqlserver_azure? 
+ user_options = connection.user_options + assert_equal "read committed", user_options["isolation_level"] + assert_equal "read committed", user_options[:isolation_level] + end + end - end + describe "schema statements" do + it "create integers when no limit supplied" do + assert_equal "integer", connection.type_to_sql(:integer) + end - end + it "create integers when limit is 4" do + assert_equal "integer", connection.type_to_sql(:integer, limit: 4) + end - context "date and time values" do + it "create integers when limit is 3" do + assert_equal "integer", connection.type_to_sql(:integer, limit: 3) + end - setup do - @date = Date.parse '2000-01-01' - @column = SqlServerChronic.columns_hash['datetime'] - end + it "create smallints when limit is 2" do + assert_equal "smallint", connection.type_to_sql(:integer, limit: 2) + end - context "on a sql datetime column" do + it "create tinyints when limit is 1" do + assert_equal "tinyint", connection.type_to_sql(:integer, limit: 1) + end - should "call quoted_datetime and surrounds its result with single quotes" do - assert_equal "'01-01-2000'", @connection.quote(@date, @column) - end + it "create bigints when limit is greater than 4" do + assert_equal "bigint", connection.type_to_sql(:integer, limit: 5) + assert_equal "bigint", connection.type_to_sql(:integer, limit: 6) + assert_equal "bigint", connection.type_to_sql(:integer, limit: 7) + assert_equal "bigint", connection.type_to_sql(:integer, limit: 8) + end - end + it "create floats when no limit supplied" do + assert_equal "float", connection.type_to_sql(:float) + end + end - end + describe "views" do + # Using connection.views + it "return an array" do + assert_instance_of Array, connection.views end - context "#quoted_datetime" do - - setup do - @iso_string = '2001-02-03T04:05:06-0700' - @date = Date.parse @iso_string - @time = Time.parse @iso_string - @datetime = DateTime.parse @iso_string - end - - context "with a Date" do + it "find SSTestCustomersView table name" do + _(connection.views).must_include "sst_customers_view" + end - should "return a dd-mm-yyyy date string" do - assert_equal '02-03-2001', @connection.quoted_datetime(@date) - end + it "work with dynamic finders" do + name = "MetaSkills" + customer = SSTestCustomersView.create! name: name + assert_equal customer, SSTestCustomersView.find_by_name(name) + end + it "not contain system views" do + systables = ["sysconstraints", "syssegments"] + systables.each do |systable| + assert !connection.views.include?(systable), "This systable #{systable} should not be in the views array." 
end + end - context "when the ActiveRecord default timezone is UTC" do + it "allow the connection#view_information method to return meta data on the view" do + view_info = connection.send(:view_information, "sst_customers_view") + assert_equal("sst_customers_view", view_info["TABLE_NAME"]) + assert_match(/CREATE VIEW sst_customers_view/, view_info["VIEW_DEFINITION"]) + end - setup do - @old_activerecord_timezone = ActiveRecord::Base.default_timezone - ActiveRecord::Base.default_timezone = :utc - end + it "allows connection#view_information to work with qualified object names" do + view_info = connection.send(:view_information, "[activerecord_unittest].[dbo].[sst_customers_view]") + assert_equal("sst_customers_view", view_info["TABLE_NAME"]) + assert_match(/CREATE VIEW sst_customers_view/, view_info["VIEW_DEFINITION"]) + end - teardown do - ActiveRecord::Base.default_timezone = @old_activerecord_timezone - @old_activerecord_timezone = nil - end + it "allows connection#view_information to work across databases when using qualified object names" do + # College is defined in activerecord_unittest2 database. + view_info = College.lease_connection.send(:view_information, "[activerecord_unittest].[dbo].[sst_customers_view]") + assert_equal("sst_customers_view", view_info["TABLE_NAME"]) + assert_match(/CREATE VIEW sst_customers_view/, view_info["VIEW_DEFINITION"]) + end - context "with a Time" do + it "allow the connection#view_table_name method to return true table_name for the view" do + assert_equal "customers", connection.send(:view_table_name, "sst_customers_view") + assert_equal "topics", connection.send(:view_table_name, "topics"), "No view here, the same table name should come back." + end - should "return an ISO 8601 datetime string" do - assert_equal '2001-02-03T11:05:06.000', @connection.quoted_datetime(@time) - end + it "allow the connection#view_table_name method to return true table_name for the view for other connections" do + assert_equal "customers", College.lease_connection.send(:view_table_name, "[activerecord_unittest].[dbo].[sst_customers_view]") + assert_equal "topics", College.lease_connection.send(:view_table_name, "topics"), "No view here, the same table name should come back." + end + # With same column names - end + it "have matching column objects" do + columns = ["id", "name", "balance"] + assert !SSTestCustomersView.columns.blank? + assert_equal columns.size, SSTestCustomersView.columns.size + columns.each do |colname| + assert_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Column, + SSTestCustomersView.columns_hash[colname], + "Column name #{colname.inspect} was not found in these columns #{SSTestCustomersView.columns.map(&:name).inspect}" + end + end - context "with a DateTime" do + it "find identity column" do + _(SSTestCustomersView.primary_key).must_equal "id" + _(connection.primary_key(SSTestCustomersView.table_name)).must_equal "id" + _(SSTestCustomersView.columns_hash["id"]).must_be :is_identity? + end - should "return an ISO 8601 datetime string" do - assert_equal '2001-02-03T11:05:06', @connection.quoted_datetime(@datetime) - end + it "find default values" do + assert_equal 0, SSTestCustomersView.new.balance + end - end + it "respond true to data_source_exists?" 
do + assert SSTestCustomersView.lease_connection.data_source_exists?(SSTestCustomersView.table_name) + end - context "with an ActiveSupport::TimeWithZone" do + # With aliased column names - context "wrapping a datetime" do + it "have matching column objects" do + columns = ["id", "pretend_null"] + assert !SSTestStringDefaultsView.columns.blank? + assert_equal columns.size, SSTestStringDefaultsView.columns.size + columns.each do |colname| + assert_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Column, + SSTestStringDefaultsView.columns_hash[colname], + "Column name #{colname.inspect} was not found in these columns #{SSTestStringDefaultsView.columns.map(&:name).inspect}" + end + end - should "return an ISO 8601 datetime string with milliseconds" do - Time.use_zone('Eastern Time (US & Canada)') do - assert_equal '2001-02-03T11:05:06.000', @connection.quoted_datetime(@datetime.in_time_zone) - end - end + it "find identity column" do + _(SSTestStringDefaultsView.primary_key).must_equal "id" + _(connection.primary_key(SSTestStringDefaultsView.table_name)).must_equal "id" + _(SSTestStringDefaultsView.columns_hash["id"]).must_be :is_identity? + end - end + it "find default values" do + assert_equal "null", SSTestStringDefaultsView.new.pretend_null, + SSTestStringDefaultsView.columns_hash["pretend_null"].inspect + end - context "wrapping a time" do + it "respond true to data_source_exists?" do + assert SSTestStringDefaultsView.lease_connection.data_source_exists?(SSTestStringDefaultsView.table_name) + end - should "return an ISO 8601 datetime string with milliseconds" do - Time.use_zone('Eastern Time (US & Canada)') do - assert_equal '2001-02-03T11:05:06.000', @connection.quoted_datetime(@time.in_time_zone) - end - end + # That have more than 4000 chars for their definition - end + it "cope with null returned for the definition" do + assert_nothing_raised { SSTestStringDefaultsBigView.columns } + end - end + it "using alternate view definition still be able to find real default" do + assert_equal "null", SSTestStringDefaultsBigView.new.pretend_null, + SSTestStringDefaultsBigView.columns_hash["pretend_null"].inspect + end + end - end + describe "database_prefix_remote_server?" do + after do + connection_options.delete(:database_prefix) + end + it "returns false if database_prefix is not configured" do + assert_equal false, connection.database_prefix_remote_server? end - - end - - context 'When disabling referential integrity' do - - setup do - @connection.disable_referential_integrity { FkTestHasPk.delete_all; FkTestHasFk.delete_all } - @parent = FkTestHasPk.create! - @member = FkTestHasFk.create!(:fk_id => @parent.id) - end - - should 'NOT ALLOW by default the deletion of a referenced parent' do - FkTestHasPk.connection.disable_referential_integrity { } - assert_raise(ActiveRecord::StatementInvalid) { @parent.destroy } + + it "returns true if database_prefix has been set" do + connection_options[:database_prefix] = "server.database.schema." + assert_equal true, connection.database_prefix_remote_server? end - - should 'ALLOW deletion of referenced parent using #disable_referential_integrity block' do - FkTestHasPk.connection.disable_referential_integrity { @parent.destroy } + + it "returns false if database_prefix has been set incorrectly" do + connection_options[:database_prefix] = "server.database.schema" + assert_equal false, connection.database_prefix_remote_server? 
end - - should 'again NOT ALLOW deletion of referenced parent after #disable_referential_integrity block' do - assert_raise(ActiveRecord::StatementInvalid) do - FkTestHasPk.connection.disable_referential_integrity { } - @parent.destroy - end + end + + it "in_memory_oltp" do + if ENV["IN_MEMORY_OLTP"] && connection.supports_in_memory_oltp? + _(SSTMemory.primary_key).must_equal "id" + _(SSTMemory.columns_hash["id"]).must_be :is_identity? + else + skip "supports_in_memory_oltp? => false" end - end - - context 'For DatabaseStatements' do - - context "finding out what user_options are available" do - - should "run the database consistency checker useroptions command" do - keys = [:textsize, :language, :isolation_level, :dateformat] - user_options = @connection.user_options - keys.each do |key| - msg = "Expected key:#{key} in user_options:#{user_options.inspect}" - assert user_options.key?(key), msg + + describe "block writes to a database" do + def setup + @conn = ActiveRecord::Base.lease_connection + end + + def test_errors_when_an_insert_query_is_called_while_preventing_writes + assert_raises(ActiveRecord::ReadOnlyError) do + ActiveRecord::Base.while_preventing_writes do + @conn.insert("INSERT INTO [subscribers] ([nick]) VALUES ('aido')") end end - - should "return a underscored key hash with indifferent access of the results" do - user_options = @connection.user_options - assert_equal 'read committed', user_options['isolation_level'] - assert_equal 'read committed', user_options[:isolation_level] - end - - end unless sqlserver_azure? - - context "altering isolation levels" do - - should "barf if the requested isolation level is not valid" do - assert_raise(ArgumentError) do - @connection.run_with_isolation_level 'INVALID ISOLATION LEVEL' do; end + end + + def test_errors_when_an_update_query_is_called_while_preventing_writes + @conn.insert("INSERT INTO [subscribers] ([nick]) VALUES ('aido')") + + assert_raises(ActiveRecord::ReadOnlyError) do + ActiveRecord::Base.while_preventing_writes do + @conn.update("UPDATE [subscribers] SET [subscribers].[name] = 'Aidan' WHERE [subscribers].[nick] = 'aido'") end end - - context "with a valid isolation level" do - - setup do - @t1 = tasks(:first_task) - @t2 = tasks(:another_task) - assert @t1, 'Tasks :first_task should be in AR fixtures' - assert @t2, 'Tasks :another_task should be in AR fixtures' - good_isolation_level = @connection.user_options_isolation_level.blank? || @connection.user_options_isolation_level =~ /read committed/i - assert good_isolation_level, "User isolation level is not at a happy starting place: #{@connection.user_options_isolation_level.inspect}" - end - - should 'allow #run_with_isolation_level to not take a block to set it' do - begin - @connection.run_with_isolation_level 'READ UNCOMMITTED' - assert_match %r|read uncommitted|i, @connection.user_options_isolation_level - ensure - @connection.run_with_isolation_level 'READ COMMITTED' - end - end - - should 'return block value using #run_with_isolation_level' do - assert_same_elements Task.find(:all), @connection.run_with_isolation_level('READ UNCOMMITTED') { Task.find(:all) } - end - - should 'pass a read uncommitted isolation level test' do - assert_nil @t2.starting, 'Fixture should have this empty.' 
- begin - Task.transaction do - @t2.starting = Time.now - @t2.save - @dirty_t2 = @connection.run_with_isolation_level('READ UNCOMMITTED') { Task.find(@t2.id) } - raise ActiveRecord::ActiveRecordError - end - rescue - 'Do Nothing' - end - assert @dirty_t2, 'Should have a Task record from within block above.' - assert @dirty_t2.starting, 'Should have a dirty date.' - assert_nil Task.find(@t2.id).starting, 'Should be nil again from botched transaction above.' - end - - end unless sqlserver_azure? - end - - end - - context 'For SchemaStatements' do - - context 'returning from #type_to_sql' do - - should 'create integers when no limit supplied' do - assert_equal 'integer', @connection.type_to_sql(:integer) - end - - should 'create integers when limit is 4' do - assert_equal 'integer', @connection.type_to_sql(:integer, 4) - end - - should 'create integers when limit is 3' do - assert_equal 'integer', @connection.type_to_sql(:integer, 3) - end - - should 'create smallints when limit is less than 3' do - assert_equal 'smallint', @connection.type_to_sql(:integer, 2) - assert_equal 'smallint', @connection.type_to_sql(:integer, 1) - end - - should 'create bigints when limit is greateer than 4' do - assert_equal 'bigint', @connection.type_to_sql(:integer, 5) - assert_equal 'bigint', @connection.type_to_sql(:integer, 6) - assert_equal 'bigint', @connection.type_to_sql(:integer, 7) - assert_equal 'bigint', @connection.type_to_sql(:integer, 8) - end - - should 'create floats when no limit supplied' do - assert_equal 'float(8)', @connection.type_to_sql(:float) + + def test_errors_when_a_delete_query_is_called_while_preventing_writes + @conn.execute("INSERT INTO [subscribers] ([nick]) VALUES ('aido')") + + assert_raises(ActiveRecord::ReadOnlyError) do + ActiveRecord::Base.while_preventing_writes do + @conn.execute("DELETE FROM [subscribers] WHERE [subscribers].[nick] = 'aido'") + end end + end + + def test_doesnt_error_when_a_select_query_is_called_while_preventing_writes + @conn.execute("INSERT INTO [subscribers] ([nick]) VALUES ('aido')") - should 'create floats when limit is supplied' do - assert_equal 'float(27)', @connection.type_to_sql(:float, 27) + ActiveRecord::Base.while_preventing_writes do + assert_equal 1, @conn.execute("SELECT * FROM [subscribers] WHERE [subscribers].[nick] = 'aido'").count end - end - end - - context 'For indexes' do - - setup do - @desc_index_name = 'idx_credit_limit_test_desc' - @connection.execute "CREATE INDEX [#{@desc_index_name}] ON [accounts] (credit_limit DESC)" - end - - teardown do - @connection.execute "DROP INDEX [#{@desc_index_name}] ON [accounts]" - end - - should 'have indexes with descending order' do - assert @connection.indexes('accounts').detect { |i| i.name == @desc_index_name } - end - - end - - context 'For views' do - - context 'using @connection.views' do - - should 'return an array' do - assert_instance_of Array, @connection.views - end - - should 'find CustomersView table name' do - assert_contains @connection.views, 'customers_view' - end - - should 'work with dynamic finders' do - name = 'MetaSkills' - customer = CustomersView.create! :name => name - assert_equal customer, CustomersView.find_by_name(name) - end - - should 'not contain system views' do - systables = ['sysconstraints','syssegments'] - systables.each do |systable| - assert !@connection.views.include?(systable), "This systable #{systable} should not be in the views array." 
- end - end - - should 'allow the connection#view_information method to return meta data on the view' do - view_info = @connection.send(:view_information,'customers_view') - assert_equal('customers_view', view_info['TABLE_NAME']) - assert_match(/CREATE VIEW customers_view/, view_info['VIEW_DEFINITION']) - end - - should 'allow the connection#view_table_name method to return true table_name for the view' do - assert_equal 'customers', @connection.send(:view_table_name,'customers_view') - assert_equal 'topics', @connection.send(:view_table_name,'topics'), 'No view here, the same table name should come back.' + + describe "table is in non-dbo schema" do + it "records can be created successfully" do + assert_difference("Alien.count", 1) do + Alien.create!(name: "Trisolarans") end - - end - - context 'used by a class for table_name' do - - context 'with same column names' do - - should 'have matching column objects' do - columns = ['id','name','balance'] - assert !CustomersView.columns.blank? - assert_equal columns.size, CustomersView.columns.size - columns.each do |colname| - assert_instance_of ActiveRecord::ConnectionAdapters::SQLServerColumn, - CustomersView.columns_hash[colname], - "Column name #{colname.inspect} was not found in these columns #{CustomersView.columns.map(&:name).inspect}" - end - end - - should 'find identity column' do - assert CustomersView.columns_hash['id'].primary - end - - should 'find default values' do - assert_equal 0, CustomersView.new.balance - end - - should 'respond true to table_exists?' do - assert CustomersView.table_exists? - end - - should 'have correct table name for all column objects' do - assert CustomersView.columns.all?{ |c| c.table_name == 'customers_view' }, - CustomersView.columns.map(&:table_name).inspect - end - + end + + it "records can be inserted using SQL" do + assert_difference("Alien.count", 2) do + Alien.lease_connection.exec_insert("insert into [test].[aliens] (id, name) VALUES(1, 'Trisolarans'), (2, 'Xenomorph')") end - - context 'with aliased column names' do - - should 'have matching column objects' do - columns = ['id','pretend_null'] - assert !StringDefaultsView.columns.blank? - assert_equal columns.size, StringDefaultsView.columns.size - columns.each do |colname| - assert_instance_of ActiveRecord::ConnectionAdapters::SQLServerColumn, - StringDefaultsView.columns_hash[colname], - "Column name #{colname.inspect} was not found in these columns #{StringDefaultsView.columns.map(&:name).inspect}" - end - end - - should 'find identity column' do - assert StringDefaultsView.columns_hash['id'].primary - end - - should 'find default values' do - assert_equal 'null', StringDefaultsView.new.pretend_null, - StringDefaultsView.columns_hash['pretend_null'].inspect - end - - should 'respond true to table_exists?' do - assert StringDefaultsView.table_exists? 
- end - - should 'have correct table name for all column objects' do - assert StringDefaultsView.columns.all?{ |c| c.table_name == 'string_defaults_view' }, - StringDefaultsView.columns.map(&:table_name).inspect - end - + end + end + + describe "table names contains spaces" do + it "records can be created successfully" do + assert_difference("TableWithSpaces.count", 1) do + TableWithSpaces.create!(name: "Bob") end - end - - context 'doing identity inserts' do - - setup do - @view_insert_sql = "INSERT INTO [customers_view] ([id],[name],[balance]) VALUES (420,'Microsoft',0)" + end + + describe "exec_insert" do + it "values clause should be case-insensitive" do + assert_difference("Post.count", 4) do + first_insert = connection.exec_insert("INSERT INTO [posts] ([id],[title],[body]) VALUES(100, 'Title', 'Body'), (102, 'Title', 'Body')") + second_insert = connection.exec_insert("INSERT INTO [posts] ([id],[title],[body]) values(113, 'Body', 'Body'), (114, 'Body', 'Body')") + + assert_equal first_insert.rows.map(&:first), [100, 102] + assert_equal second_insert.rows.map(&:first), [113, 114] end - - should 'respond true/tablename to #query_requires_identity_insert?' do - assert_equal '[customers_view]', @connection.send(:query_requires_identity_insert?,@view_insert_sql) + end + end + + describe "mismatched foreign keys error" do + def setup + @conn = ActiveRecord::Base.lease_connection + end + + it "raises an error when the foreign key is mismatched" do + error = assert_raises(ActiveRecord::MismatchedForeignKey) do + @conn.add_reference :engines, :old_car + @conn.add_foreign_key :engines, :old_cars end - - should 'be able to do an identity insert' do - assert_nothing_raised { @connection.execute(@view_insert_sql) } - assert CustomersView.find(420) + + assert_match( + %r{Column 'old_cars\.id' is not the same data type as referencing column 'engines\.old_car_id' in foreign key '.*'}, + error.message + ) + assert_not_nil error.cause + assert_equal @conn.pool, error.connection_pool + ensure + begin + @conn.execute("ALTER TABLE engines DROP COLUMN old_car_id") + rescue + nil end - end - - context 'that have more than 4000 chars for their defintion' do - - should 'cope with null returned for the defintion' do - assert_nothing_raised() { StringDefaultsBigView.columns } - end - - should 'using alternate view defintion still be able to find real default' do - assert_equal 'null', StringDefaultsBigView.new.pretend_null, - StringDefaultsBigView.columns_hash['pretend_null'].inspect + end + + describe "placeholder conditions" do + it "using time placeholder" do + assert_equal Task.where("starting < ?", Time.now).count, 1 + end + + it "using date placeholder" do + assert_equal Task.where("starting < ?", Date.today).count, 1 + end + + it "using date-time placeholder" do + assert_equal Task.where("starting < ?", DateTime.current).count, 1 + end + end + + describe "distinct select query" do + it "generated SQL does not contain unnecessary alias projection" do + sqls = capture_sql do + Post.includes(:comments).joins(:comments).first end - + assert_no_match(/AS alias_0/, sqls.first) end - end - end diff --git a/test/cases/attribute_methods_test_sqlserver.rb b/test/cases/attribute_methods_test_sqlserver.rb deleted file mode 100644 index ea3bd78ad..000000000 --- a/test/cases/attribute_methods_test_sqlserver.rb +++ /dev/null @@ -1,40 +0,0 @@ -require 'cases/sqlserver_helper' -require 'models/developer' -require 'models/topic' - -class AttributeMethodsTestSqlserver < ActiveRecord::TestCase -end - -class 
AttributeMethodsTest < ActiveRecord::TestCase - - COERCED_TESTS = [ - :test_read_attributes_before_type_cast_on_datetime, - :test_typecast_attribute_from_select_to_false, - :test_typecast_attribute_from_select_to_true - ] - - include SqlserverCoercedTest - - fixtures :developers - - def test_coerced_read_attributes_before_type_cast_on_datetime - developer = Developer.find(:first) - if developer.created_at_before_type_cast.is_a?(String) - assert_equal "#{developer.created_at.to_s(:db)}.000" , developer.attributes_before_type_cast["created_at"] - end - end - - def test_coerced_typecast_attribute_from_select_to_false - topic = Topic.create(:title => 'Budget') - topic = Topic.find(:first, :select => "topics.*, CASE WHEN 1=2 THEN 1 ELSE 0 END as is_test") - assert !topic.is_test? - end - - def test_coerced_typecast_attribute_from_select_to_true - topic = Topic.create(:title => 'Budget') - topic = Topic.find(:first, :select => "topics.*, CASE WHEN 2=2 THEN 1 ELSE 0 END as is_test") - assert topic.is_test? - end - - -end diff --git a/test/cases/base_test_sqlserver.rb b/test/cases/base_test_sqlserver.rb deleted file mode 100644 index 2dc5498dc..000000000 --- a/test/cases/base_test_sqlserver.rb +++ /dev/null @@ -1,23 +0,0 @@ -require 'cases/sqlserver_helper' -require 'models/post' -require 'models/auto_id' - -class BasicsTest < ActiveRecord::TestCase - - COERCED_TESTS = [:test_column_names_are_escaped] - - include SqlserverCoercedTest - - should 'operate as other database adapters when finding primary keys, standards are postgresql adapter' do - assert_nil Post.where(id:'').first - assert_nil Post.where(id:nil).first - assert_raise(ActiveRecord::RecordNotFound) { Post.find('') } - assert_raise(ActiveRecord::RecordNotFound) { Post.find(nil) } - end - - def test_coerced_column_names_are_escaped - assert_equal "[foo]]bar]", ActiveRecord::Base.connection.quote_column_name("foo]bar") - end - -end - diff --git a/test/cases/batches_test_sqlserver.rb b/test/cases/batches_test_sqlserver.rb deleted file mode 100644 index 60c486ed1..000000000 --- a/test/cases/batches_test_sqlserver.rb +++ /dev/null @@ -1,28 +0,0 @@ -require 'cases/sqlserver_helper' -require 'models/post' - -class BatchesTestSqlserver < ActiveRecord::TestCase -end - -class EachTest < ActiveRecord::TestCase - - COERCED_TESTS = [ - :test_find_in_batches_should_quote_batch_order - ] - - include SqlserverCoercedTest - - fixtures :posts - - def test_coerced_find_in_batches_should_quote_batch_order - c = Post.connection - assert_sql(/ORDER BY \[posts\]\.\[id\]/) do - Post.find_in_batches(:batch_size => 1) do |batch| - assert_kind_of Array, batch - assert_kind_of Post, batch.first - end - end - end - - -end diff --git a/test/cases/belongs_to_associations_test_sqlserver.rb b/test/cases/belongs_to_associations_test_sqlserver.rb deleted file mode 100644 index e6d02020b..000000000 --- a/test/cases/belongs_to_associations_test_sqlserver.rb +++ /dev/null @@ -1,19 +0,0 @@ -require 'cases/sqlserver_helper' - -class BelongsToAssociationsTestSqlserver < ActiveRecord::TestCase -end - -class BelongsToAssociationsTest < ActiveRecord::TestCase - - COERCED_TESTS = [:test_belongs_to_with_primary_key_joins_on_correct_column] - - include SqlserverCoercedTest - - def test_coerced_belongs_to_with_primary_key_joins_on_correct_column - sql = Client.joins(:firm_with_primary_key).to_sql - assert_no_match(/\[firm_with_primary_keys_companies\]\.\[id\]/, sql) - assert_match(/\[firm_with_primary_keys_companies\]\.\[name\]/, sql) - end - - -end diff --git 
a/test/cases/binary_test_sqlserver.rb b/test/cases/binary_test_sqlserver.rb deleted file mode 100644 index d2d323ff0..000000000 --- a/test/cases/binary_test_sqlserver.rb +++ /dev/null @@ -1,14 +0,0 @@ -require 'cases/sqlserver_helper' - -class BinaryTest < ActiveRecord::TestCase - - COERCED_TESTS = [:test_mixed_encoding] - - include SqlserverCoercedTest - - def test_coerced_mixed_encoding - assert true # We do encodings right. - end - -end - diff --git a/test/cases/bind_parameter_test_sqlserver.rb b/test/cases/bind_parameter_test_sqlserver.rb deleted file mode 100644 index fdd30ab6e..000000000 --- a/test/cases/bind_parameter_test_sqlserver.rb +++ /dev/null @@ -1,24 +0,0 @@ -require 'cases/sqlserver_helper' -require 'models/topic' - -class BindParameterTestSqlserver < ActiveRecord::TestCase -end - -class ActiveRecord::BindParameterTest < ActiveRecord::TestCase - - fixtures :topics - - COERCED_TESTS = [ - :test_binds_are_logged - ] - - include SqlserverCoercedTest - - def test_coerced_binds_are_logged - assert true, 'they are!' - end - - -end - - diff --git a/test/cases/calculations_test_sqlserver.rb b/test/cases/calculations_test_sqlserver.rb deleted file mode 100644 index 7a993c465..000000000 --- a/test/cases/calculations_test_sqlserver.rb +++ /dev/null @@ -1,59 +0,0 @@ -require 'cases/sqlserver_helper' -require 'models/company' -require 'models/topic' -require 'models/edge' -require 'models/club' -require 'models/organization' - -class CalculationsTestSqlserver < ActiveRecord::TestCase -end - -class CalculationsTest < ActiveRecord::TestCase - - COERCED_TESTS = [ - :test_should_return_decimal_average_of_integer_field, - :test_should_sum_expression, - :test_limit_is_kept, - :test_limit_with_offset_is_kept, - :test_offset_is_kept - ] - - include SqlserverCoercedTest - - fixtures :accounts - - def test_coerced_should_return_decimal_average_of_integer_field - # Other DBs return 3.5 like this. - # Account.all.map(&:id).inspect # => [1, 2, 3, 4, 5, 6] - # (1+2+3+4+5+6)/6.0 # => 3.5 - # But SQL Server does something like this. Bogus! 
- # (1+2+3+4+5+6)/6 # => 3 - value = Account.average(:id) - assert_equal 3, value - end - - def test_coerced_should_sum_expression - assert_equal 636, Account.sum("2 * credit_limit") - end - - def test_coerced_limit_is_kept - queries = assert_sql { Account.limit(1).count } - assert_equal 1, queries.length - assert_match(/TOP \(1\)/, queries.first) - end - - def test_coerced_limit_with_offset_is_kept - queries = assert_sql { Account.limit(1).offset(1).count } - assert_equal 1, queries.length - assert_match(/TOP \(1\)/, queries.first) - assert_match(/\[__rn\] > \(1\)/, queries.first) - end - - def test_coerced_offset_is_kept - queries = assert_sql { Account.offset(1).count } - assert_equal 1, queries.length - assert_match(/\[__rn\] > \(1\)/, queries.first) - end - - -end diff --git a/test/cases/change_column_collation_test_sqlserver.rb b/test/cases/change_column_collation_test_sqlserver.rb new file mode 100644 index 000000000..94ddf2ee7 --- /dev/null +++ b/test/cases/change_column_collation_test_sqlserver.rb @@ -0,0 +1,33 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" +require "migrations/create_clients_and_change_column_collation" + +class ChangeColumnCollationTestSqlServer < ActiveRecord::TestCase + before do + @old_verbose = ActiveRecord::Migration.verbose + ActiveRecord::Migration.verbose = false + CreateClientsAndChangeColumnCollation.new.up + end + + after do + CreateClientsAndChangeColumnCollation.new.down + ActiveRecord::Migration.verbose = @old_verbose + end + + def find_column(table, name) + table.find { |column| column.name == name } + end + + let(:clients_table) { connection.columns("clients") } + let(:name_column) { find_column(clients_table, "name") } + let(:code_column) { find_column(clients_table, "code") } + + it "change column collation to other than default" do + _(name_column.collation).must_equal "SQL_Latin1_General_CP1_CS_AS" + end + + it "change column collation to default" do + _(code_column.collation).must_equal "SQL_Latin1_General_CP1_CI_AS" + end +end diff --git a/test/cases/change_column_index_test_sqlserver.rb b/test/cases/change_column_index_test_sqlserver.rb new file mode 100644 index 000000000..08206a961 --- /dev/null +++ b/test/cases/change_column_index_test_sqlserver.rb @@ -0,0 +1,108 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" + +class ChangeColumnIndexTestSqlServer < ActiveRecord::TestCase + class CreateClientsWithUniqueIndex < ActiveRecord::Migration[8.0] + def up + create_table :clients do |t| + t.string :name, limit: 15 + end + add_index :clients, :name, unique: true + end + + def down + drop_table :clients + end + end + + class CreateBlogPostsWithMultipleIndexesOnTheSameColumn < ActiveRecord::Migration[8.0] + def up + create_table :blog_posts do |t| + t.string :title, limit: 15 + t.string :subtitle + end + add_index :blog_posts, :title, unique: true, where: "([blog_posts].[title] IS NOT NULL)", name: "custom_index_name" + add_index :blog_posts, [:title, :subtitle], unique: true + end + + def down + drop_table :blog_posts + end + end + + class ChangeClientsNameLength < ActiveRecord::Migration[8.0] + def up + change_column :clients, :name, :string, limit: 30 + end + end + + class ChangeBlogPostsTitleLength < ActiveRecord::Migration[8.0] + def up + change_column :blog_posts, :title, :string, limit: 30 + end + end + + before do + @old_verbose = ActiveRecord::Migration.verbose + ActiveRecord::Migration.verbose = false + + CreateClientsWithUniqueIndex.new.up + 
CreateBlogPostsWithMultipleIndexesOnTheSameColumn.new.up + end + + after do + CreateClientsWithUniqueIndex.new.down + CreateBlogPostsWithMultipleIndexesOnTheSameColumn.new.down + + ActiveRecord::Migration.verbose = @old_verbose + end + + def test_index_uniqueness_is_maintained_after_column_change + indexes = ActiveRecord::Base.connection.indexes("clients") + columns = ActiveRecord::Base.connection.columns("clients") + assert_equal columns.find { |column| column.name == "name" }.limit, 15 + assert_equal indexes.size, 1 + assert_equal indexes.first.name, "index_clients_on_name" + assert indexes.first.unique + + ChangeClientsNameLength.new.up + + indexes = ActiveRecord::Base.connection.indexes("clients") + columns = ActiveRecord::Base.connection.columns("clients") + assert_equal columns.find { |column| column.name == "name" }.limit, 30 + assert_equal indexes.size, 1 + assert_equal indexes.first.name, "index_clients_on_name" + assert indexes.first.unique + end + + def test_multiple_index_options_are_maintained_after_column_change + indexes = ActiveRecord::Base.connection.indexes("blog_posts") + columns = ActiveRecord::Base.connection.columns("blog_posts") + assert_equal columns.find { |column| column.name == "title" }.limit, 15 + assert_equal indexes.size, 2 + + index_1 = indexes.find { |index| index.columns == ["title"] } + assert_equal index_1.name, "custom_index_name" + assert_equal index_1.where, "([blog_posts].[title] IS NOT NULL)" + assert index_1.unique + + index_2 = indexes.find { |index| index.columns == ["title", "subtitle"] } + assert index_2.unique + + ChangeBlogPostsTitleLength.new.up + + indexes = ActiveRecord::Base.connection.indexes("blog_posts") + columns = ActiveRecord::Base.connection.columns("blog_posts") + assert_equal columns.find { |column| column.name == "title" }.limit, 30 + assert_equal indexes.size, 2 + + index_1 = indexes.find { |index| index.columns == ["title"] } + assert_equal index_1.name, "custom_index_name" + assert_equal index_1.where, "([blog_posts].[title] IS NOT NULL)" + assert index_1.unique + + index_2 = indexes.find { |index| index.columns == ["title", "subtitle"] } + assert index_2.unique + end +end diff --git a/test/cases/change_column_null_test_sqlserver.rb b/test/cases/change_column_null_test_sqlserver.rb new file mode 100644 index 000000000..1cd47de87 --- /dev/null +++ b/test/cases/change_column_null_test_sqlserver.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" +require "migrations/create_clients_and_change_column_null" + +class ChangeColumnNullTestSqlServer < ActiveRecord::TestCase + before do + @old_verbose = ActiveRecord::Migration.verbose + ActiveRecord::Migration.verbose = false + CreateClientsAndChangeColumnNull.new.up + end + + after do + CreateClientsAndChangeColumnNull.new.down + ActiveRecord::Migration.verbose = @old_verbose + end + + def find_column(table, name) + table.find { |column| column.name == name } + end + + let(:clients_table) { connection.columns("clients") } + let(:name_column) { find_column(clients_table, "name") } + let(:code_column) { find_column(clients_table, "code") } + let(:value_column) { find_column(clients_table, "value") } + + describe "#change_column_null" do + it "does not change the column limit" do + _(name_column.limit).must_equal 15 + end + + it "does not change the column default" do + _(code_column.default).must_equal "n/a" + end + + it "does not change the column precision" do + _(value_column.precision).must_equal 32 + end + + it "does not change the column scale" 
do + _(value_column.scale).must_equal 8 + end + end +end diff --git a/test/cases/coerced_tests.rb b/test/cases/coerced_tests.rb new file mode 100644 index 000000000..0615ff254 --- /dev/null +++ b/test/cases/coerced_tests.rb @@ -0,0 +1,2839 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" + +require "models/author" +require "models/book" +require "models/car" +require "models/citation" +require "models/comment" +require "models/computer" +require "models/customer" +require "models/dashboard" +require "models/developer" +require "models/event" +require "models/non_primary_key" +require "models/post" +require "models/tag" +require "models/task" +require "models/topic" + +class UniquenessValidationTest < ActiveRecord::TestCase + # So sp_executesql swallows this exception. Run without prepared to see it. + coerce_tests! :test_validate_uniqueness_with_limit + def test_validate_uniqueness_with_limit_coerced + connection.unprepared_statement do + assert_raise(ActiveRecord::ValueTooLong) do + Event.create(title: "abcdefgh") + end + end + end + + # So sp_executesql swallows this exception. Run without prepared to see it. + coerce_tests! :test_validate_uniqueness_with_limit_and_utf8 + def test_validate_uniqueness_with_limit_and_utf8_coerced + connection.unprepared_statement do + assert_raise(ActiveRecord::ValueTooLong) do + Event.create(title: "一二三四五六七八") + end + end + end + + # Same as original coerced test except that it handles default SQL Server case-insensitive collation. + coerce_tests! :test_validate_uniqueness_by_default_database_collation + def test_validate_uniqueness_by_default_database_collation_coerced + Topic.validates_uniqueness_of(:author_email_address) + + topic1 = Topic.new(author_email_address: "david@loudthinking.com") + topic2 = Topic.new(author_email_address: "David@loudthinking.com") + + assert_equal 1, Topic.where(author_email_address: "david@loudthinking.com").count + + assert_not topic1.valid? + assert_not topic1.save + + # Case insensitive collation (SQL_Latin1_General_CP1_CI_AS) by default. + # Should not allow "David" if "david" exists. + assert_not topic2.valid? + assert_not topic2.save + + assert_equal 1, Topic.where(author_email_address: "david@loudthinking.com").count + assert_equal 1, Topic.where(author_email_address: "David@loudthinking.com").count + end +end + +class UniquenessValidationWithIndexTest < ActiveRecord::TestCase + # Need to explicitly set the WHERE clause to truthy. + coerce_tests! :test_partial_index + def test_partial_index_coerced + Topic.validates_uniqueness_of(:title) + @connection.add_index(:topics, :title, unique: true, where: "approved=1", name: :topics_index) + + t = Topic.create!(title: "abc") + t.author_name = "John" + assert_queries_count(1) do + t.valid? + end + end +end + +module ActiveRecord + class AdapterTest < ActiveRecord::TestCase + # Legacy binds are not supported. + coerce_tests! :test_select_all_insert_update_delete_with_casted_binds + + # As far as I can tell, SQL Server does not support null bytes in strings. + coerce_tests! :test_update_prepared_statement + + # So sp_executesql swallows this exception. Run without prepared to see it. + coerce_tests! 
:test_value_limit_violations_are_translated_to_specific_exception + def test_value_limit_violations_are_translated_to_specific_exception_coerced + connection.unprepared_statement do + error = assert_raises(ActiveRecord::ValueTooLong) do + Event.create(title: "abcdefgh") + end + assert_not_nil error.cause + end + end + end +end + +module ActiveRecord + class AdapterPreventWritesTest < ActiveRecord::TestCase + # Fix randomly failing test. The loading of the model's schema was affecting the test. + coerce_tests! :test_errors_when_an_insert_query_is_called_while_preventing_writes + def test_errors_when_an_insert_query_is_called_while_preventing_writes_coerced + Subscriber.send(:load_schema!) + original_test_errors_when_an_insert_query_is_called_while_preventing_writes + end + + # Fix randomly failing test. The loading of the model's schema was affecting the test. + coerce_tests! :test_errors_when_an_insert_query_prefixed_by_a_double_dash_comment_containing_read_command_is_called_while_preventing_writes + def test_errors_when_an_insert_query_prefixed_by_a_double_dash_comment_containing_read_command_is_called_while_preventing_writes_coerced + Subscriber.send(:load_schema!) + original_test_errors_when_an_insert_query_prefixed_by_a_double_dash_comment_containing_read_command_is_called_while_preventing_writes + end + + # Fix randomly failing test. The loading of the model's schema was affecting the test. + coerce_tests! :test_errors_when_an_insert_query_prefixed_by_a_double_dash_comment_is_called_while_preventing_writes + def test_errors_when_an_insert_query_prefixed_by_a_double_dash_comment_is_called_while_preventing_writes_coerced + Subscriber.send(:load_schema!) + original_test_errors_when_an_insert_query_prefixed_by_a_double_dash_comment_is_called_while_preventing_writes + end + + # Invalid character encoding causes `ActiveRecord::StatementInvalid` error similar to Postgres. + coerce_tests! :test_doesnt_error_when_a_select_query_has_encoding_errors + def test_doesnt_error_when_a_select_query_has_encoding_errors_coerced + ActiveRecord::Base.while_preventing_writes do + # TinyTDS fail on encoding errors. + # But at least we can assert it fails in the client and not before when trying to match the query. + assert_raises ActiveRecord::StatementInvalid do + @connection.select_all("SELECT '\xC8'") + end + end + end + end +end + +module ActiveRecord + class AdapterTestWithoutTransaction < ActiveRecord::TestCase + # SQL Server does not allow truncation of tables that are referenced by foreign key + # constraints. So manually remove/add foreign keys in test. + coerce_tests! :test_truncate_tables + def test_truncate_tables_coerced + # Remove foreign key constraint to allow truncation. + @connection.remove_foreign_key :authors, :author_addresses + + assert_operator Post.count, :>, 0 + assert_operator Author.count, :>, 0 + assert_operator AuthorAddress.count, :>, 0 + + @connection.truncate_tables("author_addresses", "authors", "posts") + + assert_equal 0, Post.count + assert_equal 0, Author.count + assert_equal 0, AuthorAddress.count + ensure + reset_fixtures("posts", "authors", "author_addresses") + + # Restore foreign key constraint. + @connection.add_foreign_key :authors, :author_addresses + end + + # SQL Server does not allow truncation of tables that are referenced by foreign key + # constraints. So manually remove/add foreign keys in test. + coerce_tests! :test_truncate_tables_with_query_cache + def test_truncate_tables_with_query_cache + # Remove foreign key constraint to allow truncation. 
+ @connection.remove_foreign_key :authors, :author_addresses + + @connection.enable_query_cache! + + assert_operator Post.count, :>, 0 + assert_operator Author.count, :>, 0 + assert_operator AuthorAddress.count, :>, 0 + + @connection.truncate_tables("author_addresses", "authors", "posts") + + assert_equal 0, Post.count + assert_equal 0, Author.count + assert_equal 0, AuthorAddress.count + ensure + reset_fixtures("posts", "authors", "author_addresses") + @connection.disable_query_cache! + + # Restore foreign key constraint. + @connection.add_foreign_key :authors, :author_addresses + end + end +end + +class AttributeMethodsTest < ActiveRecord::TestCase + # Use IFF for boolean statement in SELECT + coerce_tests! %r{typecast attribute from select to false} + def test_typecast_attribute_from_select_to_false_coerced + Topic.create(title: "Budget") + topic = Topic.all.merge!(select: "topics.*, IIF (1 = 2, 1, 0) as is_test").first + assert_not_predicate topic, :is_test? + end + + # Use IFF for boolean statement in SELECT + coerce_tests! %r{typecast attribute from select to true} + def test_typecast_attribute_from_select_to_true_coerced + Topic.create(title: "Budget") + topic = Topic.all.merge!(select: "topics.*, IIF (1 = 1, 1, 0) as is_test").first + assert_predicate topic, :is_test? + end +end + +class BasicsTest < ActiveRecord::TestCase + # Use square brackets as SQL Server escaped character + coerce_tests! :test_column_names_are_escaped + def test_column_names_are_escaped_coerced + conn = ActiveRecord::Base.lease_connection + assert_equal "[t]]]", conn.quote_column_name("t]") + end + + # Just like PostgreSQLAdapter does. + coerce_tests! :test_respect_internal_encoding + + # Caused in Rails v4.2.5 by adding `firm_id` column in this http://git.io/vBfMs + # commit. Trust Rails has this covered. + coerce_tests! :test_find_keeps_multiple_group_values + + def test_update_date_time_attributes + Time.use_zone("Eastern Time (US & Canada)") do + topic = Topic.find(1) + time = Time.zone.parse("2017-07-17 10:56") + topic.update!(written_on: time) + assert_equal(time, topic.written_on) + end + end + + def test_update_date_time_attributes_with_default_timezone_local + with_env_tz "America/New_York" do + with_timezone_config default: :local do + Time.use_zone("Eastern Time (US & Canada)") do + topic = Topic.find(1) + time = Time.zone.parse("2017-07-17 10:56") + topic.update!(written_on: time) + assert_equal(time, topic.written_on) + end + end + end + end +end + +class BelongsToAssociationsTest < ActiveRecord::TestCase + # Since @client.firm is a single first/top, and we use FETCH the order clause is used. + coerce_tests! :test_belongs_to_does_not_use_order_by + + # Square brackets around column name + coerce_tests! :test_belongs_to_with_primary_key_joins_on_correct_column + def test_belongs_to_with_primary_key_joins_on_correct_column_coerced + sql = Client.joins(:firm_with_primary_key).to_sql + assert_no_match(/\[firm_with_primary_keys_companies\]\.\[id\]/, sql) + assert_match(/\[firm_with_primary_keys_companies\]\.\[name\]/, sql) + end + + # Asserted SQL to get one row different from original test. + coerce_tests! 
:test_belongs_to + def test_belongs_to_coerced + client = Client.find(3) + first_firm = companies(:first_firm) + assert_queries_and_values_match(/FETCH NEXT @3 ROWS ONLY/, ["Firm", "Agency", 1, 1]) do + assert_equal first_firm, client.firm + assert_equal first_firm.name, client.firm.name + end + end +end + +module ActiveRecord + class BindParameterTest < ActiveRecord::TestCase + # SQL Server adapter does not use a statement cache as query plans are already reused using `EXEC sp_executesql`. + coerce_tests! :test_statement_cache + coerce_tests! :test_statement_cache_with_query_cache + coerce_tests! :test_statement_cache_with_find + coerce_tests! :test_statement_cache_with_find_by + coerce_tests! :test_statement_cache_with_in_clause + coerce_tests! :test_statement_cache_with_sql_string_literal + end +end + +module ActiveRecord + class InstrumentationTest < ActiveRecord::TestCase + # Fix randomly failing test. The loading of the model's schema was affecting the test. + coerce_tests! :test_payload_name_on_load + def test_payload_name_on_load_coerced + Book.send(:load_schema!) + original_test_payload_name_on_load + end + + # Need to remove index as SQL Server considers NULLs on a unique-index to be equal unlike PostgreSQL/MySQL/SQLite. + coerce_tests! :test_payload_row_count_on_select_all + def test_payload_row_count_on_select_all_coerced + connection.remove_index(:books, column: [:author_id, :name]) + + original_test_payload_row_count_on_select_all + ensure + Book.where(author_id: nil, name: "row count book 1").delete_all + Book.lease_connection.add_index(:books, [:author_id, :name], unique: true) + end + + # Need to remove index as SQL Server considers NULLs on a unique-index to be equal unlike PostgreSQL/MySQL/SQLite. + coerce_tests! :test_payload_row_count_on_pluck + def test_payload_row_count_on_pluck_coerced + connection.remove_index(:books, column: [:author_id, :name]) + + original_test_payload_row_count_on_pluck + ensure + Book.where(author_id: nil, name: "row count book 2").delete_all + Book.lease_connection.add_index(:books, [:author_id, :name], unique: true) + end + + # Need to remove index as SQL Server considers NULLs on a unique-index to be equal unlike PostgreSQL/MySQL/SQLite. + coerce_tests! :test_payload_row_count_on_raw_sql + def test_payload_row_count_on_raw_sql_coerced + connection.remove_index(:books, column: [:author_id, :name]) + + original_test_payload_row_count_on_raw_sql + ensure + Book.where(author_id: nil, name: "row count book 3").delete_all + Book.lease_connection.add_index(:books, [:author_id, :name], unique: true) + end + + # Fix randomly failing test. The loading of the model's schema was affecting the test. + coerce_tests! :test_payload_affected_rows + def test_payload_affected_rows_coerced + Book.create!(name: "TEMP RECORD TO RUN SCHEMA QUERIES").destroy! + original_test_payload_affected_rows + end + end +end + +class CalculationsTest < ActiveRecord::TestCase + # SELECT columns must be in the GROUP clause. + coerce_tests! :test_should_count_with_group_by_qualified_name_on_loaded + def test_should_count_with_group_by_qualified_name_on_loaded_coerced + accounts = Account.group("accounts.id").select("accounts.id") + + expected = {1 => 1, 2 => 1, 3 => 1, 4 => 1, 5 => 1, 6 => 1} + + assert_not_predicate accounts, :loaded? + assert_equal expected, accounts.count + + accounts.load + + assert_predicate accounts, :loaded? + assert_equal expected, accounts.count(:id) + end + + # Fix randomly failing test. The loading of the model's schema was affecting the test. 
+ coerce_tests! :test_offset_is_kept + def test_offset_is_kept_coerced + Account.send(:load_schema!) + original_test_offset_is_kept + end + + # The SQL Server `AVG()` function for a list of integers returns an integer (not a decimal). + coerce_tests! :test_should_return_decimal_average_of_integer_field + def test_should_return_decimal_average_of_integer_field_coerced + value = Account.average(:id) + assert_equal 3, value + end + + # In SQL Server the `AVG()` function for a list of integers returns an integer so need to cast values as decimals before averaging. + # Match SQL Server limit implementation. + coerce_tests! :test_select_avg_with_group_by_as_virtual_attribute_with_sql + def test_select_avg_with_group_by_as_virtual_attribute_with_sql_coerced + rails_core = companies(:rails_core) + + sql = <<~SQL + SELECT firm_id, AVG(CAST(credit_limit AS DECIMAL)) AS avg_credit_limit + FROM accounts + WHERE firm_id = ? + GROUP BY firm_id + ORDER BY firm_id + OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY + SQL + + account = Account.find_by_sql([sql, rails_core]).first + + # id was not selected, so it should be nil + # (cannot select id because it wasn't used in the GROUP BY clause) + assert_nil account.id + + # firm_id was explicitly selected, so it should be present + assert_equal(rails_core, account.firm) + + # avg_credit_limit should be present as a virtual attribute + assert_equal(52.5, account.avg_credit_limit) + end + + # In SQL Server the `AVG()` function for a list of integers returns an integer so need to cast values as decimals before averaging. + # Order column must be in the GROUP clause. + coerce_tests! :test_select_avg_with_group_by_as_virtual_attribute_with_ar + def test_select_avg_with_group_by_as_virtual_attribute_with_ar_coerced + rails_core = companies(:rails_core) + + account = Account + .select(:firm_id, "AVG(CAST(credit_limit AS DECIMAL)) AS avg_credit_limit") + .where(firm: rails_core) + .group(:firm_id) + .order(:firm_id) + .take! + + # id was not selected, so it should be nil + # (cannot select id because it wasn't used in the GROUP BY clause) + assert_nil account.id + + # firm_id was explicitly selected, so it should be present + assert_equal(rails_core, account.firm) + + # avg_credit_limit should be present as a virtual attribute + assert_equal(52.5, account.avg_credit_limit) + end + + # In SQL Server the `AVG()` function for a list of integers returns an integer so need to cast values as decimals before averaging. + # SELECT columns must be in the GROUP clause. + # Match SQL Server limit implementation. + coerce_tests! :test_select_avg_with_joins_and_group_by_as_virtual_attribute_with_sql + def test_select_avg_with_joins_and_group_by_as_virtual_attribute_with_sql_coerced + rails_core = companies(:rails_core) + + sql = <<~SQL + SELECT companies.*, AVG(CAST(accounts.credit_limit AS DECIMAL)) AS avg_credit_limit + FROM companies + INNER JOIN accounts ON companies.id = accounts.firm_id + WHERE companies.id = ? 
+        GROUP BY companies.id, companies.type, companies.firm_id, companies.firm_name, companies.name, companies.client_of, companies.rating, companies.account_id, companies.description, companies.status
+        ORDER BY companies.id
+        OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY
+    SQL
+
+    firm = DependentFirm.find_by_sql([sql, rails_core]).first
+
+    # all the DependentFirm attributes should be present
+    assert_equal rails_core, firm
+    assert_equal rails_core.name, firm.name
+
+    # avg_credit_limit should be present as a virtual attribute
+    assert_equal(52.5, firm.avg_credit_limit)
+  end
+
+  # In SQL Server the `AVG()` function for a list of integers returns an integer so need to cast values as decimals before averaging.
+  # SELECT columns must be in the GROUP clause.
+  coerce_tests! :test_select_avg_with_joins_and_group_by_as_virtual_attribute_with_ar
+  def test_select_avg_with_joins_and_group_by_as_virtual_attribute_with_ar_coerced
+    rails_core = companies(:rails_core)
+
+    firm = DependentFirm
+      .select("companies.*", "AVG(CAST(accounts.credit_limit AS DECIMAL)) AS avg_credit_limit")
+      .where(id: rails_core)
+      .joins(:account)
+      .group(:id, :type, :firm_id, :firm_name, :name, :client_of, :rating, :account_id, :description, :status)
+      .take!
+
+    # all the DependentFirm attributes should be present
+    assert_equal rails_core, firm
+    assert_equal rails_core.name, firm.name
+
+    # avg_credit_limit should be present as a virtual attribute
+    assert_equal(52.5, firm.avg_credit_limit)
+  end
+
+  # Match SQL Server limit implementation
+  coerce_tests! :test_limit_is_kept
+  def test_limit_is_kept_coerced
+    queries = capture_sql { Account.limit(1).count }
+    assert_equal 1, queries.length
+    assert_match(/ORDER BY \[accounts\]\.\[id\] ASC OFFSET 0 ROWS FETCH NEXT @0 ROWS ONLY/, queries.first)
+  end
+
+  # Match SQL Server limit implementation
+  coerce_tests! :test_limit_with_offset_is_kept
+  def test_limit_with_offset_is_kept_coerced
+    queries = capture_sql { Account.limit(1).offset(1).count }
+    assert_equal 1, queries.length
+    assert_match(/ORDER BY \[accounts\]\.\[id\] ASC OFFSET @0 ROWS FETCH NEXT @1 ROWS ONLY/, queries.first)
+  end
+
+  # SQL Server needs an alias for the calculated column
+  coerce_tests! :test_distinct_count_all_with_custom_select_and_order
+  def test_distinct_count_all_with_custom_select_and_order_coerced
+    accounts = Account.distinct.select("credit_limit % 10 AS the_limit").order(Arel.sql("credit_limit % 10"))
+    assert_queries_count(1) { assert_equal 3, accounts.count(:all) }
+    assert_queries_count(1) { assert_equal 3, accounts.load.size }
+  end
+
+  # Leave it up to users to format selects/functions so HAVING works correctly.
+  coerce_tests! :test_having_with_strong_parameters
+
+  # SELECT columns must be in the GROUP clause. Since `ids` only selects the primary key, you cannot perform this query in SQL Server.
+  coerce_tests! :test_ids_with_includes_and_non_primary_key_order
+
+  # To limit the results in SQL Server we use `FETCH NEXT @0 ROWS ONLY` instead of `LIMIT @0`. To use `FETCH NEXT` an order must be provided.
+  coerce_tests! :test_no_order_by_when_counting_all
+end
+
+module ActiveRecord
+  class Migration
+    class ChangeSchemaTest < ActiveRecord::TestCase
+      # Integer.default is a number and not a string
+      coerce_tests!
:test_create_table_with_defaults + def test_create_table_with_defaults_coerce + connection.create_table :testings do |t| + t.column :one, :string, default: "hello" + t.column :two, :boolean, default: true + t.column :three, :boolean, default: false + t.column :four, :integer, default: 1 + t.column :five, :text, default: "hello" + end + + columns = connection.columns(:testings) + one = columns.detect { |c| c.name == "one" } + two = columns.detect { |c| c.name == "two" } + three = columns.detect { |c| c.name == "three" } + four = columns.detect { |c| c.name == "four" } + five = columns.detect { |c| c.name == "five" } + + assert_equal "hello", one.default + assert_equal true, two.fetch_cast_type(connection).deserialize(two.default) + assert_equal false, three.fetch_cast_type(connection).deserialize(three.default) + assert_equal 1, four.default + assert_equal "hello", five.default + end + + # Use precision 6 by default for datetime/timestamp columns. SQL Server uses `datetime2` for date-times with precision. + coerce_tests! :test_add_column_with_postgresql_datetime_type + def test_add_column_with_postgresql_datetime_type_coerced + connection.create_table :testings do |t| + t.column :foo, :datetime + end + + column = connection.columns(:testings).find { |c| c.name == "foo" } + + assert_equal :datetime, column.type + assert_equal "datetime2(6)", column.sql_type + end + + # Use precision 6 by default for datetime/timestamp columns. SQL Server uses `datetime2` for date-times with precision. + coerce_tests! :test_change_column_with_timestamp_type + def test_change_column_with_timestamp_type_coerced + connection.create_table :testings do |t| + t.column :foo, :datetime, null: false + end + + connection.change_column :testings, :foo, :timestamp + + column = connection.columns(:testings).find { |c| c.name == "foo" } + + assert_equal :datetime, column.type + assert_equal "datetime2(6)", column.sql_type + end + + # Use precision 6 by default for datetime/timestamp columns. SQL Server uses `datetime2` for date-times with precision. + coerce_tests! :test_add_column_with_timestamp_type + def test_add_column_with_timestamp_type_coerced + connection.create_table :testings do |t| + t.column :foo, :timestamp + end + + column = connection.columns(:testings).find { |c| c.name == "foo" } + + assert_equal :datetime, column.type + assert_equal "datetime2(6)", column.sql_type + end + end + end +end + +module ActiveRecord + class Migration + class ColumnAttributesTest < ActiveRecord::TestCase + # We have a default 4000 varying character limit. + coerce_tests! :test_add_column_without_limit + def test_add_column_without_limit_coerced + add_column :test_models, :description, :string, limit: nil + TestModel.reset_column_information + _(TestModel.columns_hash["description"].limit).must_equal 4000 + end + end + end +end + +module ActiveRecord + class Migration + class ColumnsTest < ActiveRecord::TestCase + # Our defaults are real 70000 integers vs '70000' strings. + coerce_tests! 
:test_rename_column_preserves_default_value_not_null + def test_rename_column_preserves_default_value_not_null_coerced + add_column "test_models", "salary", :integer, default: 70000 + default_before = connection.columns("test_models").find { |c| c.name == "salary" }.default + assert_equal 70000, default_before + rename_column "test_models", "salary", "annual_salary" + TestModel.reset_column_information + assert TestModel.column_names.include?("annual_salary") + default_after = connection.columns("test_models").find { |c| c.name == "annual_salary" }.default + assert_equal 70000, default_after + end + + # Dropping the column removes the single index. + coerce_tests! :test_remove_column_with_multi_column_index + def test_remove_column_with_multi_column_index_coerced + add_column "test_models", :hat_size, :integer + add_column "test_models", :hat_style, :string, limit: 100 + add_index "test_models", ["hat_style", "hat_size"], unique: true + assert_equal 1, connection.indexes("test_models").size + remove_column("test_models", "hat_size") + assert_equal [], connection.indexes("test_models").map(&:name) + end + + # Choose `StatementInvalid` vs `ActiveRecordError`. + coerce_tests! :test_rename_nonexistent_column + def test_rename_nonexistent_column_coerced + exception = ActiveRecord::StatementInvalid + assert_raise(exception) do + rename_column "test_models", "nonexistent", "should_fail" + end + end + end + end +end + +class MigrationTest < ActiveRecord::TestCase + # For some reason our tests set Rails.@_env which breaks test env switching. + coerce_tests! :test_internal_metadata_stores_environment_when_other_data_exists + coerce_tests! :test_internal_metadata_stores_environment + + # Same as original but using binary type instead of blob + coerce_tests! :test_add_column_with_casted_type_if_not_exists_set_to_true + def test_add_column_with_casted_type_if_not_exists_set_to_true_coerced + migration_a = Class.new(ActiveRecord::Migration::Current) { + def version + 100 + end + + def migrate(x) + add_column "people", "last_name", :binary + end + }.new + + migration_b = Class.new(ActiveRecord::Migration::Current) { + def version + 101 + end + + def migrate(x) + add_column "people", "last_name", :binary, if_not_exists: true + end + }.new + + ActiveRecord::Migrator.new(:up, [migration_a], @schema_migration, @internal_metadata, 100).migrate + assert_column Person, :last_name, "migration_a should have created the last_name column on people" + + assert_nothing_raised do + ActiveRecord::Migrator.new(:up, [migration_b], @schema_migration, @internal_metadata, 101).migrate + end + ensure + Person.reset_column_information + if Person.column_names.include?("last_name") + Person.lease_connection.remove_column("people", "last_name") + end + end +end + +module ActiveRecord + class Migration + class CompatibilityTest < ActiveRecord::TestCase + # Error message depends on the database adapter. + coerce_tests! 
:test_create_table_on_7_0 + def test_create_table_on_7_0_coerced + long_table_name = "a" * (connection.table_name_length + 1) + migration = Class.new(ActiveRecord::Migration[7.0]) { + @@long_table_name = long_table_name + def version + 100 + end + + def migrate(x) + create_table @@long_table_name + end + }.new + + error = assert_raises(StandardError) do + ActiveRecord::Migrator.new(:up, [migration], @schema_migration, @internal_metadata).migrate + end + assert_match(/The identifier that starts with '#{long_table_name[0...-1]}' is too long/i, error.message) + ensure + begin + connection.drop_table(long_table_name) + rescue + nil + end + end + + # SQL Server truncates long table names when renaming (https://learn.microsoft.com/en-us/sql/relational-databases/system-stored-procedures/sp-rename-transact-sql?view=sql-server-ver16). + coerce_tests! :test_rename_table_on_7_0 + def test_rename_table_on_7_0_coerced + long_table_name = "a" * (connection.table_name_length + 1) + connection.create_table(:more_testings) + + migration = Class.new(ActiveRecord::Migration[7.0]) { + @@long_table_name = long_table_name + def version + 100 + end + + def migrate(x) + rename_table :more_testings, @@long_table_name + end + }.new + + ActiveRecord::Migrator.new(:up, [migration], @schema_migration, @internal_metadata).migrate + assert connection.table_exists?(long_table_name[0...-1]) + assert_not connection.table_exists?(:more_testings) + assert connection.table_exists?(long_table_name[0...-1]) + ensure + begin + connection.drop_table(:more_testings) + rescue + nil + end + begin + connection.drop_table(long_table_name[0...-1]) + rescue + nil + end + end + + # SQL Server has a different maximum index name length. + coerce_tests! :test_add_index_errors_on_too_long_name_7_0 + def test_add_index_errors_on_too_long_name_7_0_coerced + long_index_name = "a" * (connection.index_name_length + 1) + + migration = Class.new(ActiveRecord::Migration[7.0]) { + @@long_index_name = long_index_name + def migrate(x) + add_column :testings, :very_long_column_name_to_test_with, :string + add_index :testings, [:foo, :bar, :very_long_column_name_to_test_with], name: @@long_index_name + end + }.new + + error = assert_raises(StandardError) do + ActiveRecord::Migrator.new(:up, [migration], @schema_migration, @internal_metadata).migrate + end + assert_match(/Index name '#{long_index_name}' on table 'testings' is too long/i, error.message) + end + + # SQL Server has a different maximum index name length. + coerce_tests! :test_create_table_add_index_errors_on_too_long_name_7_0 + def test_create_table_add_index_errors_on_too_long_name_7_0_coerced + long_index_name = "a" * (connection.index_name_length + 1) + + migration = Class.new(ActiveRecord::Migration[7.0]) { + @@long_index_name = long_index_name + def migrate(x) + create_table :more_testings do |t| + t.integer :foo + t.integer :bar + t.integer :very_long_column_name_to_test_with + t.index [:foo, :bar, :very_long_column_name_to_test_with], name: @@long_index_name + end + end + }.new + + error = assert_raises(StandardError) do + ActiveRecord::Migrator.new(:up, [migration], @schema_migration, @internal_metadata).migrate + end + assert_match(/Index name '#{long_index_name}' on table 'more_testings' is too long/i, error.message) + ensure + begin + connection.drop_table :more_testings + rescue + nil + end + end + + # Foreign key count is the same as PostgreSQL/SQLite. + coerce_tests! 
:test_remove_foreign_key_on_8_0 + def test_remove_foreign_key_on_8_0_coerced + connection.create_table(:sub_testings) do |t| + t.references :testing, foreign_key: true, type: :bigint + t.references :experiment, foreign_key: {to_table: :testings}, type: :bigint + end + + migration = Class.new(ActiveRecord::Migration[8.0]) do + def up + change_table(:sub_testings) do |t| + t.remove_foreign_key :testings + t.remove_foreign_key :testings, column: :experiment_id + end + end + end + + assert_raise(StandardError, match: /Table 'sub_testings' has no foreign key for testings$/) { + ActiveRecord::Migrator.new(:up, [migration], @schema_migration, @internal_metadata).migrate + } + + foreign_keys = @connection.foreign_keys("sub_testings") + assert_equal 2, foreign_keys.size + ensure + connection.drop_table(:sub_testings, if_exists: true) + ActiveRecord::Base.clear_cache! + end + end + end +end + +class CoreTest < ActiveRecord::TestCase + # I think fixtures are using the wrong time zone and the `:first` + # `topics`.`bonus_time` attribute of 2005-01-30t15:28:00.00+01:00 is + # getting local EST time for me and set to "09:28:00.0000000". + coerce_tests! :test_pretty_print_persisted +end + +module ActiveRecord + module ConnectionAdapters + # Just like PostgreSQLAdapter does. + TypeLookupTest.coerce_all_tests! if defined?(TypeLookupTest) + + # All sorts of errors due to how we test. Even setting ENV['RAILS_ENV'] to + # a value of 'default_env' will still show tests failing. Just ignoring all + # of them since we have no monkey in this circus. + MergeAndResolveDefaultUrlConfigTest.coerce_all_tests! if defined?(MergeAndResolveDefaultUrlConfigTest) + ConnectionHandlerTest.coerce_all_tests! if defined?(ConnectionHandlerTest) + end +end + +module ActiveRecord + # The original module is hardcoded for PostgreSQL/SQLite/MySQL tests. + module DatabaseTasksSetupper + undef_method :setup + def setup + @sqlserver_tasks = + Class.new do + def create + end + + def drop + end + + def purge + end + + def charset + end + + def collation + end + + def structure_dump(*) + end + + def structure_load(*) + end + end.new + + $stdout, @original_stdout = StringIO.new, $stdout + $stderr, @original_stderr = StringIO.new, $stderr + end + + undef_method :with_stubbed_new + def with_stubbed_new + ActiveRecord::Tasks::SQLServerDatabaseTasks.stub(:new, @sqlserver_tasks) do + yield + end + end + end + + class DatabaseTasksCreateTest < ActiveRecord::TestCase + # Coerce PostgreSQL/SQLite/MySQL tests. + coerce_all_tests! + + def test_sqlserver_create + with_stubbed_new do + assert_called(eval("@sqlserver_tasks", binding, __FILE__, __LINE__), :create) do + ActiveRecord::Tasks::DatabaseTasks.create "adapter" => :sqlserver + end + end + end + end + + class DatabaseTasksDropTest < ActiveRecord::TestCase + # Coerce PostgreSQL/SQLite/MySQL tests. + coerce_all_tests! + + def test_sqlserver_drop + with_stubbed_new do + assert_called(eval("@sqlserver_tasks", binding, __FILE__, __LINE__), :drop) do + ActiveRecord::Tasks::DatabaseTasks.drop "adapter" => :sqlserver + end + end + end + end + + class DatabaseTasksPurgeTest < ActiveRecord::TestCase + # Coerce PostgreSQL/SQLite/MySQL tests. + coerce_all_tests! + + def test_sqlserver_purge + with_stubbed_new do + assert_called(eval("@sqlserver_tasks", binding, __FILE__, __LINE__), :purge) do + ActiveRecord::Tasks::DatabaseTasks.purge "adapter" => :sqlserver + end + end + end + end + + class DatabaseTasksCharsetTest < ActiveRecord::TestCase + # Coerce PostgreSQL/SQLite/MySQL tests. + coerce_all_tests! 
+ + def test_sqlserver_charset + with_stubbed_new do + assert_called(eval("@sqlserver_tasks", binding, __FILE__, __LINE__), :charset) do + ActiveRecord::Tasks::DatabaseTasks.charset "adapter" => :sqlserver + end + end + end + end + + class DatabaseTasksCollationTest < ActiveRecord::TestCase + # Coerce PostgreSQL/SQLite/MySQL tests. + coerce_all_tests! + + def test_sqlserver_collation + with_stubbed_new do + assert_called(eval("@sqlserver_tasks", binding, __FILE__, __LINE__), :collation) do + ActiveRecord::Tasks::DatabaseTasks.collation "adapter" => :sqlserver + end + end + end + end + + class DatabaseTasksStructureDumpTest < ActiveRecord::TestCase + # Coerce PostgreSQL/SQLite/MySQL tests. + coerce_all_tests! + + def test_sqlserver_structure_dump + with_stubbed_new do + assert_called_with( + eval("@sqlserver_tasks", binding, __FILE__, __LINE__), :structure_dump, + ["awesome-file.sql", nil] + ) do + ActiveRecord::Tasks::DatabaseTasks.structure_dump({"adapter" => :sqlserver}, "awesome-file.sql") + end + end + end + end + + class DatabaseTasksStructureLoadTest < ActiveRecord::TestCase + # Coerce PostgreSQL/SQLite/MySQL tests. + coerce_all_tests! + + def test_sqlserver_structure_load + with_stubbed_new do + assert_called_with( + eval("@sqlserver_tasks", binding, __FILE__, __LINE__), + :structure_load, + ["awesome-file.sql", nil] + ) do + ActiveRecord::Tasks::DatabaseTasks.structure_load({"adapter" => :sqlserver}, "awesome-file.sql") + end + end + end + end + + class DatabaseTasksCreateAllTest < ActiveRecord::TestCase + # We extend `local_database?` so that common VM IPs can be used. + coerce_tests! :test_ignores_remote_databases, :test_warning_for_remote_databases + end + + class DatabaseTasksDropAllTest < ActiveRecord::TestCase + # We extend `local_database?` so that common VM IPs can be used. + coerce_tests! :test_ignores_remote_databases, :test_warning_for_remote_databases + end +end + +class DefaultScopingTest < ActiveRecord::TestCase + # We are not doing order duplicate removal anymore. + coerce_tests! :test_order_in_default_scope_should_not_prevail +end + +class EagerAssociationTest < ActiveRecord::TestCase + # Use LEN() instead of LENGTH() function. + coerce_tests! :test_count_with_include + def test_count_with_include_coerced + assert_equal 3, authors(:david).posts_with_comments.where("LEN(comments.body) > 15").references(:comments).count + end + + # The raw SQL in the scope uses `limit 1`. + coerce_tests! %r{including association based on sql condition and no database column} +end + +class FinderTest < ActiveRecord::TestCase + fixtures :customers, :topics, :authors + + # We have implicit ordering, via FETCH. + coerce_tests! %r{doesn't have implicit ordering}, + :test_find_doesnt_have_implicit_ordering + + # Assert SQL Server limit implementation + coerce_tests! :test_take_and_first_and_last_with_integer_should_use_sql_limit + def test_take_and_first_and_last_with_integer_should_use_sql_limit_coerced + assert_queries_and_values_match(/OFFSET 0 ROWS FETCH NEXT @0 ROWS ONLY/, [3]) { Topic.take(3).entries } + assert_queries_and_values_match(/OFFSET 0 ROWS FETCH NEXT @0 ROWS ONLY/, [2]) { Topic.first(2).entries } + assert_queries_and_values_match(/OFFSET 0 ROWS FETCH NEXT @0 ROWS ONLY/, [5]) { Topic.last(5).entries } + end + + # This fails only when run in the full test suite task. Just taking it out of the mix. + coerce_tests! 
:test_find_with_order_on_included_associations_with_construct_finder_sql_for_association_limiting_and_is_distinct + + # Can not use array condition due to not finding right type and hence fractional second quoting. + coerce_tests! :test_condition_utc_time_interpolation_with_default_timezone_local + def test_condition_utc_time_interpolation_with_default_timezone_local_coerced + with_env_tz "America/New_York" do + with_timezone_config default: :local do + topic = Topic.first + assert_equal topic, Topic.where(written_on: topic.written_on.getutc).first + end + end + end + + # Can not use array condition due to not finding right type and hence fractional second quoting. + coerce_tests! :test_condition_local_time_interpolation_with_default_timezone_utc + def test_condition_local_time_interpolation_with_default_timezone_utc_coerced + with_env_tz "America/New_York" do + with_timezone_config default: :utc do + topic = Topic.first + assert_equal topic, Topic.where(written_on: topic.written_on.getlocal).first + end + end + end + + # Check for `FETCH NEXT x ROWS` rather then `LIMIT`. + coerce_tests! :test_include_on_unloaded_relation_with_match + def test_include_on_unloaded_relation_with_match_coerced + assert_queries_match(/1 AS one.*FETCH NEXT @2 ROWS ONLY/) do + assert_equal true, Customer.where(name: "David").include?(customers(:david)) + end + end + + # Check for `FETCH NEXT x ROWS` rather then `LIMIT`. + coerce_tests! :test_include_on_unloaded_relation_without_match + def test_include_on_unloaded_relation_without_match_coerced + assert_queries_match(/1 AS one.*FETCH NEXT @2 ROWS ONLY/) do + assert_equal false, Customer.where(name: "David").include?(customers(:mary)) + end + end + + # Check for `FETCH NEXT x ROWS` rather then `LIMIT`. + coerce_tests! :test_member_on_unloaded_relation_with_match + def test_member_on_unloaded_relation_with_match_coerced + assert_queries_match(/1 AS one.*FETCH NEXT @2 ROWS ONLY/) do + assert_equal true, Customer.where(name: "David").member?(customers(:david)) + end + end + + # Check for `FETCH NEXT x ROWS` rather then `LIMIT`. + coerce_tests! :test_member_on_unloaded_relation_without_match + def test_member_on_unloaded_relation_without_match_coerced + assert_queries_match(/1 AS one.*FETCH NEXT @2 ROWS ONLY/) do + assert_equal false, Customer.where(name: "David").member?(customers(:mary)) + end + end + + # Check for `FETCH NEXT x ROWS` rather then `LIMIT`. + coerce_tests! :test_implicit_order_column_is_configurable_with_a_single_value + def test_implicit_order_column_is_configurable_with_a_single_value_coerced + old_implicit_order_column = Topic.implicit_order_column + Topic.implicit_order_column = "title" + + assert_equal topics(:fifth), Topic.first + assert_equal topics(:third), Topic.last + + c = Topic.lease_connection + assert_queries_and_values_match(/ORDER BY #{Regexp.escape(c.quote_table_name("topics.title"))} DESC, #{Regexp.escape(c.quote_table_name("topics.id"))} DESC OFFSET 0 ROWS FETCH NEXT @0 ROWS ONLY/i, [1]) { + Topic.last + } + ensure + Topic.implicit_order_column = old_implicit_order_column + end + + # Check for `FETCH NEXT x ROWS` rather then `LIMIT`. + coerce_tests! 
:test_implicit_order_column_is_configurable_with_multiple_values + def test_implicit_order_column_is_configurable_with_multiple_values_coerced + old_implicit_order_column = Topic.implicit_order_column + Topic.implicit_order_column = ["title", "author_name"] + + assert_queries_and_values_match(/ORDER BY #{Regexp.escape(quote_table_name("topics.title"))} DESC, #{Regexp.escape(quote_table_name("topics.author_name"))} DESC, #{Regexp.escape(quote_table_name("topics.id"))} DESC OFFSET 0 ROWS FETCH NEXT @0 ROWS ONLY/i, [1]) { + Topic.last + } + ensure + Topic.implicit_order_column = old_implicit_order_column + end + + # Check for `FETCH NEXT x ROWS` rather than `LIMIT`. + coerce_tests! :test_ordering_does_not_append_primary_keys_or_query_constraints_if_passed_an_implicit_order_column_array_ending_in_nil + def test_ordering_does_not_append_primary_keys_or_query_constraints_if_passed_an_implicit_order_column_array_ending_in_nil_coerced + old_implicit_order_column = Topic.implicit_order_column + Topic.implicit_order_column = ["author_name", nil] + + assert_queries_and_values_match(/ORDER BY #{Regexp.escape(quote_table_name("topics.author_name"))} DESC OFFSET 0 ROWS FETCH NEXT @0 ROWS ONLY/i, [1]) { + Topic.last + } + ensure + Topic.implicit_order_column = old_implicit_order_column + end + + # Check for `FETCH NEXT x ROWS` rather than `LIMIT`. + coerce_tests! :test_implicit_order_set_to_primary_key + def test_implicit_order_set_to_primary_key_coerced + old_implicit_order_column = Topic.implicit_order_column + Topic.implicit_order_column = "id" + + c = Topic.lease_connection + assert_queries_and_values_match(/ORDER BY #{Regexp.escape(c.quote_table_name("topics.id"))} DESC OFFSET 0 ROWS FETCH NEXT @0 ROWS ONLY/i, [1]) { + Topic.last + } + ensure + Topic.implicit_order_column = old_implicit_order_column + end + + # Check for `FETCH NEXT x ROWS` rather than `LIMIT`. + coerce_tests! :test_implicit_order_for_model_without_primary_key + def test_implicit_order_for_model_without_primary_key_coerced + old_implicit_order_column = NonPrimaryKey.implicit_order_column + NonPrimaryKey.implicit_order_column = "created_at" + + c = NonPrimaryKey.lease_connection + + assert_queries_and_values_match(/ORDER BY #{Regexp.escape(c.quote_table_name("non_primary_keys.created_at"))} DESC OFFSET 0 ROWS FETCH NEXT @0 ROWS ONLY/i, [1]) { + NonPrimaryKey.last + } + ensure + NonPrimaryKey.implicit_order_column = old_implicit_order_column + end + + # Check for `FETCH NEXT x ROWS` rather than `LIMIT`. + coerce_tests! :test_member_on_unloaded_relation_with_composite_primary_key + def test_member_on_unloaded_relation_with_composite_primary_key_coerced + assert_queries_match(/1 AS one.* FETCH NEXT @3 ROWS ONLY/) do + book = cpk_books(:cpk_great_author_first_book) + assert Cpk::Book.where(title: "The first book").member?(book) + end + end + + # Check for `FETCH NEXT x ROWS` rather than `LIMIT`. + coerce_tests!
:test_implicit_order_column_prepends_query_constraints + def test_implicit_order_column_prepends_query_constraints_coerced + c = ClothingItem.lease_connection + ClothingItem.implicit_order_column = "description" + quoted_type = Regexp.escape(c.quote_table_name("clothing_items.clothing_type")) + quoted_color = Regexp.escape(c.quote_table_name("clothing_items.color")) + quoted_description = Regexp.escape(c.quote_table_name("clothing_items.description")) + + assert_queries_match(/ORDER BY #{quoted_description} ASC, #{quoted_type} ASC, #{quoted_color} ASC OFFSET 0 ROWS FETCH NEXT @(\d) ROWS ONLY/i) do + assert_kind_of ClothingItem, ClothingItem.first + end + ensure + ClothingItem.implicit_order_column = nil + end + + # Check for `FETCH NEXT x ROWS` rather than `LIMIT`. + coerce_tests! %r{#last for a model with composite query constraints} + test "#last for a model with composite query constraints coerced" do + c = ClothingItem.lease_connection + quoted_type = Regexp.escape(c.quote_table_name("clothing_items.clothing_type")) + quoted_color = Regexp.escape(c.quote_table_name("clothing_items.color")) + + assert_queries_match(/ORDER BY #{quoted_type} DESC, #{quoted_color} DESC OFFSET 0 ROWS FETCH NEXT @(\d) ROWS ONLY/i) do + assert_kind_of ClothingItem, ClothingItem.last + end + end + + # Check for `FETCH NEXT x ROWS` rather than `LIMIT`. + coerce_tests! %r{#first for a model with composite query constraints} + test "#first for a model with composite query constraints coerced" do + c = ClothingItem.lease_connection + quoted_type = Regexp.escape(c.quote_table_name("clothing_items.clothing_type")) + quoted_color = Regexp.escape(c.quote_table_name("clothing_items.color")) + + assert_queries_match(/ORDER BY #{quoted_type} ASC, #{quoted_color} ASC OFFSET 0 ROWS FETCH NEXT @(\d) ROWS ONLY/i) do + assert_kind_of ClothingItem, ClothingItem.first + end + end + + # Check for `FETCH NEXT x ROWS` rather than `LIMIT`. + coerce_tests! :test_implicit_order_column_reorders_query_constraints + def test_implicit_order_column_reorders_query_constraints_coerced + c = ClothingItem.lease_connection + ClothingItem.implicit_order_column = "color" + quoted_type = Regexp.escape(c.quote_table_name("clothing_items.clothing_type")) + quoted_color = Regexp.escape(c.quote_table_name("clothing_items.color")) + + assert_queries_match(/ORDER BY #{quoted_color} ASC, #{quoted_type} ASC OFFSET 0 ROWS FETCH NEXT @(\d) ROWS ONLY/i) do + assert_kind_of ClothingItem, ClothingItem.first + end + ensure + ClothingItem.implicit_order_column = nil + end + + # Check for `FETCH NEXT x ROWS` rather than `LIMIT`. + coerce_tests! :test_include_on_unloaded_relation_with_composite_primary_key + def test_include_on_unloaded_relation_with_composite_primary_key_coerced + assert_queries_match(/1 AS one.*OFFSET 0 ROWS FETCH NEXT @(\d) ROWS ONLY/) do + book = cpk_books(:cpk_great_author_first_book) + assert Cpk::Book.where(title: "The first book").include?(book) + end + end + + # Check for `FETCH NEXT x ROWS` rather than `LIMIT`. + coerce_tests!
:test_nth_to_last_with_order_uses_limit + def test_nth_to_last_with_order_uses_limit_coerced + c = Topic.lease_connection + assert_queries_match(/ORDER BY #{Regexp.escape(c.quote_table_name("topics.id"))} DESC OFFSET @(\d) ROWS FETCH NEXT @(\d) ROWS ONLY/i) do + Topic.second_to_last + end + + assert_queries_match(/ORDER BY #{Regexp.escape(c.quote_table_name("topics.updated_at"))} DESC OFFSET @(\d) ROWS FETCH NEXT @(\d) ROWS ONLY/i) do + Topic.order(:updated_at).second_to_last + end + end + + # SQL Server is unable to use aliased SELECT in the HAVING clause. + coerce_tests! :test_include_on_unloaded_relation_with_having_referencing_aliased_select +end + +module ActiveRecord + class Migration + class ForeignKeyTest < ActiveRecord::TestCase + # SQL Server does not support 'restrict' for 'on_update' or 'on_delete'. + coerce_tests! :test_add_on_delete_restrict_foreign_key + def test_add_on_delete_restrict_foreign_key_coerced + assert_raises ArgumentError do + @connection.add_foreign_key :astronauts, :rockets, column: "rocket_id", on_delete: :restrict + end + assert_raises ArgumentError do + @connection.add_foreign_key :astronauts, :rockets, column: "rocket_id", on_update: :restrict + end + end + + # SQL Server does not support 'restrict' for 'on_update' or 'on_delete'. + coerce_tests! :test_remove_foreign_key_with_restrict_action + + # Error message depends on the database adapter. + coerce_tests! :test_add_foreign_key_with_if_not_exists_not_set + def test_add_foreign_key_with_if_not_exists_not_set_coerced + @connection.add_foreign_key :astronauts, :rockets + assert_equal 1, @connection.foreign_keys("astronauts").size + + error = assert_raises do + @connection.add_foreign_key :astronauts, :rockets + end + + assert_match(/TinyTds::Error: There is already an object named '.*' in the database/, error.message) + end + end + end +end + +class HasOneAssociationsTest < ActiveRecord::TestCase + # We use OFFSET/FETCH vs TOP. So we always have an order. + coerce_tests! :test_has_one_does_not_use_order_by + + # Asserted SQL to get one row differs from the original test. + coerce_tests! :test_has_one + def test_has_one_coerced + firm = companies(:first_firm) + first_account = Account.find(1) + assert_queries_match(/FETCH NEXT @(\d) ROWS ONLY/) do + assert_equal first_account, firm.account + assert_equal first_account.credit_limit, firm.account.credit_limit + end + end +end + +class HasOneThroughAssociationsTest < ActiveRecord::TestCase + # Asserted SQL to get one row differs from the original test. + coerce_tests! :test_has_one_through_executes_limited_query + def test_has_one_through_executes_limited_query_coerced + boring_club = clubs(:boring_club) + assert_queries_match(/FETCH NEXT @(\d) ROWS ONLY/) do + assert_equal boring_club, @member.general_club + end + end +end + +class LeftOuterJoinAssociationTest < ActiveRecord::TestCase + # Uses the || operator in SQL. Just trust that core gets value out of this test. + coerce_tests! :test_does_not_override_select +end + +class NestedRelationScopingTest < ActiveRecord::TestCase + # Assert SQL Server limit implementation + coerce_tests! :test_merge_options + def test_merge_options_coerced + Developer.where("salary = 80000").scoping do + Developer.limit(10).scoping do + devs = Developer.all + sql = devs.to_sql + assert_match "(salary = 80000)", sql + assert_match "FETCH NEXT 10 ROWS ONLY", sql + end + end + end +end + +class PersistenceTest < ActiveRecord::TestCase + # Rails test required updating an identity column. + coerce_tests!
:test_update_columns_changing_id + + # Rails test required updating an identity column. + coerce_tests! :test_update + def test_update_coerced + topic = Topic.find(1) + assert_not_predicate topic, :approved? + assert_equal "The First Topic", topic.title + + topic.update("approved" => true, "title" => "The First Topic Updated") + topic.reload + assert_predicate topic, :approved? + assert_equal "The First Topic Updated", topic.title + + topic.update(approved: false, title: "The First Topic") + topic.reload + assert_not_predicate topic, :approved? + assert_equal "The First Topic", topic.title + end + + # In SQL Server it's not possible to set the primary key column using a trigger and then have it returned. + coerce_tests! :test_model_with_no_auto_populated_fields_still_returns_primary_key_after_insert +end + +class UpdateAllTest < ActiveRecord::TestCase + # Regular expression slightly different. + coerce_tests! :test_update_all_doesnt_ignore_order + def test_update_all_doesnt_ignore_order_coerced + assert_equal authors(:david).id + 1, authors(:mary).id # make sure there is going to be a duplicate PK error + test_update_with_order_succeeds = lambda do |order| + Author.order(order).update_all("id = id + 1") + rescue ActiveRecord::ActiveRecordError + false + end + + if test_update_with_order_succeeds.call("id DESC") + # test that this wasn't a fluke and using an incorrect order results in an exception + assert_not test_update_with_order_succeeds.call("id ASC") + else + # test that we're failing because the current Arel engine doesn't support UPDATE ORDER BY queries and is using subselects instead + assert_queries_match(/\AUPDATE .+ \(SELECT .* ORDER BY id DESC.*\)/i) do + test_update_with_order_succeeds.call("id DESC") + end + end + end + + # SELECT columns must be in the GROUP clause. + coerce_tests! :test_update_all_with_group_by + def test_update_all_with_group_by_coerced + minimum_comments_count = 2 + + Post.most_commented(minimum_comments_count).update_all(title: "ig") + posts = Post.select(:id, :title).group(:title).most_commented(minimum_comments_count).all.to_a + + assert_operator posts.length, :>, 0 + assert posts.all? { |post| post.comments.length >= minimum_comments_count } + assert posts.all? { |post| post.title == "ig" } + + post = Post.select(:id, :title).group(:title).joins(:comments).group("posts.id").having("count(comments.id) < #{minimum_comments_count}").first + assert_not_equal "ig", post.title + end +end + +class DeleteAllTest < ActiveRecord::TestCase + # SELECT columns must be in the GROUP clause. + coerce_tests! :test_delete_all_with_group_by_and_having + def test_delete_all_with_group_by_and_having_coerced + minimum_comments_count = 2 + posts_to_be_deleted = Post.select(:id).most_commented(minimum_comments_count).all.to_a + assert_operator posts_to_be_deleted.length, :>, 0 + + assert_difference("Post.count", -posts_to_be_deleted.length) do + Post.most_commented(minimum_comments_count).delete_all + end + + posts_to_be_deleted.each do |deleted_post| + assert_raise(ActiveRecord::RecordNotFound) { deleted_post.reload } + end + end +end + +module ActiveRecord + class PredicateBuilderTest < ActiveRecord::TestCase + # Same as original test except string has `N` prefix to indicate unicode string. + coerce_tests! :test_registering_new_handlers + def test_registering_new_handlers_coerced + assert_match %r{#{Regexp.escape(topic_title)} ~ N'rails'}i, Topic.where(title: /rails/).to_sql + end + + # Same as original test except string has `N` prefix to indicate unicode string.
+ coerce_tests! :test_registering_new_handlers_for_association + def test_registering_new_handlers_for_association_coerced + assert_match %r{#{Regexp.escape(topic_title)} ~ N'rails'}i, Reply.joins(:topic).where(topics: {title: /rails/}).to_sql + end + + # Same as original test except string has `N` prefix to indicate unicode string. + coerce_tests! :test_registering_new_handlers_for_joins + def test_registering_new_handlers_for_joins_coerced + Reply.belongs_to :regexp_topic, -> { where(title: /rails/) }, class_name: "Topic", foreign_key: "parent_id" + + assert_match %r{#{Regexp.escape(quote_table_name("regexp_topic.title"))} ~ N'rails'}i, Reply.joins(:regexp_topic).references(Arel.sql("regexp_topic")).to_sql + end + + private + + def topic_title + Topic.lease_connection.quote_table_name("topics.title") + end + end +end + +class QueryCacheTest < ActiveRecord::TestCase + # SQL Server adapter not in list of supported adapters in original test. + coerce_tests! :test_cache_does_not_wrap_results_in_arrays + def test_cache_does_not_wrap_results_in_arrays_coerced + Task.cache do + assert_equal 2, Task.lease_connection.select_value("SELECT count(*) AS count_all FROM tasks") + end + end +end + +class RelationTest < ActiveRecord::TestCase + # Use LEN() instead of LENGTH() function. + coerce_tests! :test_reverse_order_with_function + def test_reverse_order_with_function_coerced + topics = Topic.order(Arel.sql("LEN(title)")).reverse_order + assert_equal topics(:second).title, topics.first.title + end + + # Use LEN() instead of LENGTH() function. + coerce_tests! :test_reverse_order_with_function_other_predicates + def test_reverse_order_with_function_other_predicates_coerced + topics = Topic.order(Arel.sql("author_name, LEN(title), id")).reverse_order + assert_equal topics(:second).title, topics.first.title + topics = Topic.order(Arel.sql("LEN(author_name), id, LEN(title)")).reverse_order + assert_equal topics(:fifth).title, topics.first.title + end + + # We have implicit ordering, via FETCH. + coerce_tests! %r{doesn't have implicit ordering} + + # We have implicit ordering, via FETCH. + coerce_tests! :test_reorder_with_take + def test_reorder_with_take_coerced + sql_log = capture_sql do + assert Post.order(:title).reorder(nil).take + end + assert sql_log.none? { |sql| /order by \[posts\]\.\[title\]/i.match?(sql) }, "ORDER BY title was used in the query: #{sql_log}" + assert sql_log.all? { |sql| /order by \[posts\]\.\[id\]/i.match?(sql) }, "default ORDER BY ID was not used in the query: #{sql_log}" + end + + # We have implicit ordering, via FETCH. + coerce_tests! :test_reorder_with_first + def test_reorder_with_first_coerced + post = nil + sql_log = capture_sql do + post = Post.order(:title).reorder(nil).first + end + assert_equal posts(:welcome), post + assert sql_log.none? { |sql| /order by \[posts\]\.\[title\]/i.match?(sql) }, "ORDER BY title was used in the query: #{sql_log}" + assert sql_log.all? { |sql| /order by \[posts\]\.\[id\]/i.match?(sql) }, "default ORDER BY ID was not used in the query: #{sql_log}" + end + + # We are not doing order duplicate removal anymore. + coerce_tests! :test_order_using_scoping + + # We are not doing order duplicate removal anymore. + coerce_tests! :test_default_scope_order_with_scope_order + + # Order column must be in the GROUP clause. + coerce_tests! 
:test_multiple_where_and_having_clauses + def test_multiple_where_and_having_clauses_coerced + post = Post.first + having_then_where = Post.having(id: post.id).where(title: post.title) + .having(id: post.id).where(title: post.title).group(:id).select(:id) + + assert_equal [post], having_then_where + end + + # Order column must be in the GROUP clause. + coerce_tests! :test_having_with_binds_for_both_where_and_having + def test_having_with_binds_for_both_where_and_having + post = Post.first + having_then_where = Post.having(id: post.id).where(title: post.title).group(:id).select(:id) + where_then_having = Post.where(title: post.title).having(id: post.id).group(:id).select(:id) + + assert_equal [post], having_then_where + assert_equal [post], where_then_having + end + + # Find any limit via our expression. + coerce_tests! %r{relations don't load all records in #inspect} + def test_relations_dont_load_all_records_in_inspect_coerced + assert_queries_match(/NEXT @0 ROWS/) do + Post.all.inspect + end + end + + # Find any limit via our expression. + coerce_tests! %r{relations don't load all records in #pretty_print} + def test_relations_dont_load_all_records_in_pretty_print_coerced + assert_queries_match(/FETCH NEXT @(\d) ROWS ONLY/) do + PP.pp Post.all, StringIO.new # avoid outputting. + end + end + + # Order column must be in the GROUP clause. + coerce_tests! :test_empty_complex_chained_relations + def test_empty_complex_chained_relations_coerced + posts = Post.select("comments_count").where("id is not null").group("author_id", "id").where("legacy_comments_count > 0") + + assert_queries_count(1) { assert_equal false, posts.empty? } + assert_not_predicate posts, :loaded? + + no_posts = posts.where(title: "") + assert_queries_count(1) { assert_equal true, no_posts.empty? } + assert_not_predicate no_posts, :loaded? + end + + # Can't apply offset without ORDER + coerce_tests! %r{using a custom table affects the wheres} + test "using a custom table affects the wheres coerced" do + post = posts(:welcome) + + assert_equal post, custom_post_relation.where!(title: post.title).order(:id).take + end + + # Can't apply offset without ORDER + coerce_tests! %r{using a custom table with joins affects the joins} + test "using a custom table with joins affects the joins coerced" do + post = posts(:welcome) + + assert_equal post, custom_post_relation.joins(:author).where!(title: post.title).order(:id).take + end + + # Use LEN() instead of LENGTH() function. + coerce_tests! :test_reverse_arel_assoc_order_with_function + def test_reverse_arel_assoc_order_with_function_coerced + topics = Topic.order(Arel.sql("LEN(title)") => :asc).reverse_order + assert_equal topics(:second).title, topics.first.title + end +end + +module ActiveRecord + class RelationTest < ActiveRecord::TestCase + # Skipping this test. SQL Server doesn't support optimizer hint as comments + coerce_tests! :test_relation_with_optimizer_hints_filters_sql_comment_delimiters + + coerce_tests! :test_does_not_duplicate_optimizer_hints_on_merge + def test_does_not_duplicate_optimizer_hints_on_merge_coerced + escaped_table = Post.lease_connection.quote_table_name("posts") + expected = "SELECT #{escaped_table}.* FROM #{escaped_table} OPTION (OMGHINT)" + query = Post.optimizer_hints("OMGHINT").merge(Post.optimizer_hints("OMGHINT")).to_sql + assert_equal expected, query + end + + # Order column must be in the GROUP clause. However, with implicit ordering we can't test this when selecting non-aggregate expression column. + coerce_tests! 
%r{no queries when using pick with non-aggregate expression and empty IN} + + # Order column must be in the GROUP clause. However, with implicit ordering we can't test this when selecting aggregate expression column. + coerce_tests! %r{runs queries when using pick with aggregate expression despite empty IN} + end +end + +class SanitizeTest < ActiveRecord::TestCase + # Use nvarchar string (N'') in assert + coerce_tests! :test_sanitize_sql_like_example_use_case + def test_sanitize_sql_like_example_use_case_coerced + searchable_post = Class.new(Post) do + def self.search_as_method(term) + where("title LIKE ?", sanitize_sql_like(term, "!")) + end + + scope :search_as_scope, ->(term) { + where("title LIKE ?", sanitize_sql_like(term, "!")) + } + end + + assert_queries_match(/LIKE @0/) do + searchable_post.search_as_method("20% _reduction_!").to_a + end + + assert_queries_match(/LIKE @0/) do + searchable_post.search_as_scope("20% _reduction_!").to_a + end + end + + # Use nvarchar string (N'') in assert + coerce_tests! :test_named_bind_with_literal_colons + def test_named_bind_with_literal_colons_coerced + assert_equal "TO_TIMESTAMP(N'2017/08/02 10:59:00', 'YYYY/MM/DD HH12:MI:SS')", bind("TO_TIMESTAMP(:date, 'YYYY/MM/DD HH12\\:MI\\:SS')", date: "2017/08/02 10:59:00") + assert_raise(ActiveRecord::PreparedStatementInvalid) { bind "TO_TIMESTAMP(:date, 'YYYY/MM/DD HH12:MI:SS')", date: "2017/08/02 10:59:00" } + end +end + +class SchemaDumperTest < ActiveRecord::TestCase + # Use nvarchar string (N'') in assert + coerce_tests! :test_dump_schema_versions_outputs_lexically_reverse_ordered_versions_regardless_of_database_order + def test_dump_schema_versions_outputs_lexically_reverse_ordered_versions_regardless_of_database_order_coerced + versions = %w[20100101010101 20100201010101 20100301010101] + versions.shuffle.each do |v| + @schema_migration.create_version(v) + end + + schema_info = ActiveRecord::Base.lease_connection.dump_schema_versions + expected = <<~STR + INSERT INTO #{ActiveRecord::Base.lease_connection.quote_table_name("schema_migrations")} (version) VALUES + (N'20100301010101'), + (N'20100201010101'), + (N'20100101010101'); + STR + assert_equal expected.strip, schema_info + ensure + @schema_migration.delete_all_versions + end + + # We have precision to 38. + coerce_tests! :test_schema_dump_keeps_large_precision_integer_columns_as_decimal + def test_schema_dump_keeps_large_precision_integer_columns_as_decimal_coerced + output = standard_dump + assert_match %r{t.decimal\s+"atoms_in_universe",\s+precision: 38}, output + end + + # This is a poorly written test and really does not catch the bottom'ness it is meant to. Ours throw it off. + coerce_tests! :test_foreign_keys_are_dumped_at_the_bottom_to_circumvent_dependency_issues + + # Fall through false positive with no filter. + coerce_tests! :test_schema_dumps_partial_indices + def test_schema_dumps_partial_indices_coerced + index_definition = standard_dump.split("\n").grep(/t.index.*company_partial_index/).first.strip + assert_equal 't.index ["firm_id", "type"], name: "company_partial_index", where: "([rating]>(10))"', index_definition + end + + # We do not quote the 2.78 string default. + coerce_tests! :test_schema_dump_includes_decimal_options + def test_schema_dump_includes_decimal_options_coerced + output = dump_all_table_schema([/^[^n]/]) + assert_match %r{precision: 3,[[:space:]]+scale: 2,[[:space:]]+default: 2\.78}, output + end + + # Tests are not about a specific adapter. + coerce_tests! 
:test_do_not_dump_foreign_keys_when_bypassed_by_config + + # SQL Server formats the check constraint expression differently. + coerce_tests! :test_schema_dumps_check_constraints + def test_schema_dumps_check_constraints_coerced + constraint_definition = dump_table_schema("products").split("\n").grep(/t.check_constraint.*products_price_check/).first.strip + assert_equal 't.check_constraint "[price]>[discounted_price]", name: "products_price_check"', constraint_definition + end +end + +class SchemaDumperDefaultsTest < ActiveRecord::TestCase + # These date formats do not match ours. We got these covered in our dumper tests. + coerce_tests! :test_schema_dump_defaults_with_universally_supported_types + + # SQL Server uses different method to generate a UUID than Rails test uses. Reimplemented the + # test in 'SchemaDumperDefaultsCoerceTest'. + coerce_tests! :test_schema_dump_with_text_column +end + +class SchemaDumperDefaultsCoerceTest < ActiveRecord::TestCase + include SchemaDumpingHelper + + setup do + @connection = ActiveRecord::Base.lease_connection + @connection.create_table :dump_defaults, force: true do |t| + t.string :string_with_default, default: "Hello!" + t.date :date_with_default, default: "2014-06-05" + t.datetime :datetime_with_default, default: "2014-06-05 07:17:04" + t.time :time_with_default, default: "07:17:04" + t.decimal :decimal_with_default, default: "1234567890.0123456789", precision: 20, scale: 10 + + t.text :text_with_default, default: "John' Doe" + t.text :uuid, default: -> { "newid()" } + end + end + + def test_schema_dump_with_text_column_coerced + output = dump_table_schema("dump_defaults") + + assert_match %r{t\.text\s+"text_with_default",.*?default: "John' Doe"}, output + assert_match %r{t\.text\s+"uuid",.*?default: -> \{ "newid\(\)" \}}, output + end +end + +class TestAdapterWithInvalidConnection < ActiveRecord::TestCase + # We trust Rails on this since we do not want to install mysql. + coerce_tests! %r{inspect on Model class does not raise} +end + +class TransactionTest < ActiveRecord::TestCase + # SQL Server does not have query for release_savepoint. + coerce_tests! :test_releasing_named_savepoints + def test_releasing_named_savepoints_coerced + Topic.transaction do + Topic.lease_connection.materialize_transactions + + Topic.lease_connection.create_savepoint("another") + Topic.lease_connection.release_savepoint("another") + + # We do not have a notion of releasing, so this does nothing and doesn't raise an error. + assert_nothing_raised do + Topic.lease_connection.release_savepoint("another") + end + end + end + + # SQL Server does not have query for release_savepoint. + coerce_tests! :test_nested_transactions_after_disable_lazy_transactions + def test_nested_transactions_after_disable_lazy_transactions_coerced + Topic.lease_connection.disable_lazy_transactions! 
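+ # Savepoints in T-SQL are created with SAVE TRANSACTION and have no RELEASE counterpart, so the expected queries below include the savepoint creation but no release statement.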
+ + actual_queries = capture_sql(include_schema: true) do + # RealTransaction (begin..commit) + Topic.transaction(requires_new: true) do + # ResetParentTransaction (no queries) + Topic.transaction(requires_new: true) do + Topic.delete_all + # SavepointTransaction (savepoint..release) + Topic.transaction(requires_new: true) do + # ResetParentTransaction (no queries) + Topic.transaction(requires_new: true) do + # no-op + end + end + end + Topic.delete_all + end + end + + expected_queries = [ + /BEGIN/i, + /DELETE/i, + /^SAVE TRANSACTION/i, + /DELETE/i, + /COMMIT/i + ] + + assert_equal expected_queries.size, actual_queries.size + expected_queries.zip(actual_queries) do |expected, actual| + assert_match expected, actual + end + end + + # SQL Server does not have query for release_savepoint. + coerce_tests! :test_nested_transactions_skip_excess_savepoints + def test_nested_transactions_skip_excess_savepoints_coerced + actual_queries = capture_sql(include_schema: true) do + # RealTransaction (begin..commit) + Topic.transaction(requires_new: true) do + # ResetParentTransaction (no queries) + Topic.transaction(requires_new: true) do + Topic.delete_all + # SavepointTransaction (savepoint..release) + Topic.transaction(requires_new: true) do + # ResetParentTransaction (no queries) + Topic.transaction(requires_new: true) do + Topic.delete_all + end + end + end + Topic.delete_all + end + end + + expected_queries = [ + /BEGIN/i, + /DELETE/i, + /^SAVE TRANSACTION/i, + /DELETE/i, + /DELETE/i, + /COMMIT/i + ] + + assert_equal expected_queries.size, actual_queries.size + expected_queries.zip(actual_queries) do |expected, actual| + assert_match expected, actual + end + end +end + +class TransactionIsolationTest < ActiveRecord::TestCase + # SQL Server will lock the table for counts even when both + # connections are `READ COMMITTED`. So we bypass with `READPAST`. + coerce_tests! %r{read committed} + test "read committed coerced" do + Tag.transaction(isolation: :read_committed) do + assert_equal 0, Tag.count + Tag2.transaction do + Tag2.create + assert_equal 0, Tag.lock("WITH(READPAST)").count + end + end + assert_equal 1, Tag.count + end + + # I really need some help understanding this one. + coerce_tests! %r{repeatable read} + + private + + # Need to handle the resetting of the isolation level in the adapter by `SQLServerRealTransaction#commit` for each + # connection pool. After the resetting events have been removed we can assert the number of expected isolation level + # events. This workaround assumes that the `count` also matches the number of connection pools used in the test. + # Note: MySQL & PostgreSQL do not reset the connection and SQLite does support transaction isolation. + undef_method :assert_begin_isolation_level_event + def assert_begin_isolation_level_event(events, isolation: "READ COMMITTED", count: 1) + isolation_events = events.select { |event| event.match(/SET TRANSACTION ISOLATION LEVEL/) } + + count.times do + index_of_reset_starting_isolation_level_event = isolation_events.index("SET TRANSACTION ISOLATION LEVEL READ COMMITTED") + assert index_of_reset_starting_isolation_level_event.present? + isolation_events.delete_at(index_of_reset_starting_isolation_level_event) + end + + assert_equal count, isolation_events.count { |event| event.match(/SET TRANSACTION ISOLATION LEVEL #{isolation}/) } + end +end + +class ViewWithPrimaryKeyTest < ActiveRecord::TestCase + # We have a few view tables. use includes vs equality. + coerce_tests! 
:test_views + def test_views_coerced + assert_includes @connection.views, Ebook.table_name + end + + # We do better than ActiveRecord and find the views PK. + coerce_tests! :test_does_not_assume_id_column_as_primary_key + def test_does_not_assume_id_column_as_primary_key_coerced + model = Class.new(ActiveRecord::Base) { self.table_name = "ebooks" } + assert_equal "id", model.primary_key + end +end + +class ViewWithoutPrimaryKeyTest < ActiveRecord::TestCase + # We have a few view tables. use includes vs equality. + coerce_tests! :test_views + def test_views_coerced + assert_includes @connection.views, Paperback.table_name + end +end + +class YamlSerializationTest < ActiveRecord::TestCase + coerce_tests! :test_types_of_virtual_columns_are_not_changed_on_round_trip + def test_types_of_virtual_columns_are_not_changed_on_round_trip_coerced + author = Author.select("authors.*, 5 as posts_count").first + dumped_author = YAML.dump(author) + dumped = YAML.respond_to?(:unsafe_load) ? YAML.unsafe_load(dumped_author) : YAML.load(dumped_author) + assert_equal 5, author.posts_count + assert_equal 5, dumped.posts_count + end +end + +class DateTimePrecisionTest < ActiveRecord::TestCase + # Original test had `7` which we support vs `8` which we use. + coerce_tests! :test_invalid_datetime_precision_raises_error + def test_invalid_datetime_precision_raises_error_coerced + assert_raises ActiveRecord::ActiveRecordError do + @connection.create_table(:foos, force: true) do |t| + t.timestamps precision: 8 + end + end + end + + # datetime is rounded to increments of .000, .003, or .007 seconds + coerce_tests! :test_datetime_precision_is_truncated_on_assignment + def test_datetime_precision_is_truncated_on_assignment_coerced + @connection.create_table(:foos, force: true) + @connection.add_column :foos, :created_at, :datetime, precision: 0 + @connection.add_column :foos, :updated_at, :datetime, precision: 6 + + time = ::Time.now.change(nsec: 123456789) + foo = Foo.new(created_at: time, updated_at: time) + + assert_equal 0, foo.created_at.nsec + assert_equal 123457000, foo.updated_at.nsec + + foo.save! + foo.reload + + assert_equal 0, foo.created_at.nsec + assert_equal 123457000, foo.updated_at.nsec + end +end + +class TimePrecisionTest < ActiveRecord::TestCase + # datetime is rounded to increments of .000, .003, or .007 seconds + coerce_tests! :test_time_precision_is_truncated_on_assignment + def test_time_precision_is_truncated_on_assignment_coerced + @connection.create_table(:foos, force: true) + @connection.add_column :foos, :start, :time, precision: 0 + @connection.add_column :foos, :finish, :time, precision: 6 + + time = ::Time.now.change(nsec: 123456789) + foo = Foo.new(start: time, finish: time) + + assert_equal 0, foo.start.nsec + assert_equal 123457000, foo.finish.nsec + + foo.save! + foo.reload + + assert_equal 0, foo.start.nsec + assert_equal 123457000, foo.finish.nsec + end + + # SQL Server uses default precision for time. + coerce_tests! :test_no_time_precision_isnt_truncated_on_assignment + + # SQL Server accepts precision of 7 for time. + coerce_tests! :test_invalid_time_precision_raises_error +end + +class DefaultNumbersTest < ActiveRecord::TestCase + # We do better with native types and do not return strings for everything. + coerce_tests! 
:test_default_positive_integer + def test_default_positive_integer_coerced + record = DefaultNumber.new + assert_equal 7, record.positive_integer + assert_equal 7, record.positive_integer_before_type_cast + end + + # We do better with native types and do not return strings for everything. + coerce_tests! :test_default_negative_integer + def test_default_negative_integer_coerced + record = DefaultNumber.new + assert_equal(-5, record.negative_integer) + assert_equal(-5, record.negative_integer_before_type_cast) + end + + # We do better with native types and do not return strings for everything. + coerce_tests! :test_default_decimal_number + def test_default_decimal_number_coerced + record = DefaultNumber.new + assert_equal BigDecimal("2.78"), record.decimal_number + assert_equal 2.78, record.decimal_number_before_type_cast + end +end + +module ActiveRecord + class CollectionCacheKeyTest < ActiveRecord::TestCase + # Will trust rails has this sorted since you cant offset without a limit. + coerce_tests! %r{with offset which return 0 rows} + end +end + +module ActiveRecord + class CacheKeyTest < ActiveRecord::TestCase + # Like Mysql2 and PostgreSQL, SQL Server doesn't return a string value for updated_at. In the Rails tests + # the tests are skipped if adapter is Mysql2 or PostgreSQL. + coerce_tests! %r{cache_version is the same when it comes from the DB or from the user} + coerce_tests! %r{cache_version does NOT call updated_at when value is from the database} + coerce_tests! %r{cache_version does not truncate zeros when timestamp ends in zeros} + end +end + +module ActiveRecord + class StatementCacheTest < ActiveRecord::TestCase + # Getting random failures. + coerce_tests! :test_find_does_not_use_statement_cache_if_table_name_is_changed + + # Need to remove index as SQL Server considers NULLs on a unique-index to be equal unlike PostgreSQL/MySQL/SQLite. + coerce_tests! :test_statement_cache_values_differ + def test_statement_cache_values_differ_coerced + Book.lease_connection.remove_index(:books, column: [:author_id, :name]) + + original_test_statement_cache_values_differ + ensure + Book.where(author_id: nil, name: "my book").delete_all + Book.lease_connection.add_index(:books, [:author_id, :name], unique: true) + end + end +end + +module ActiveRecord + module ConnectionAdapters + class SchemaCacheTest < ActiveRecord::TestCase + # Tests fail on Windows AppVeyor CI with 'Permission denied' error when renaming file during `File.atomic_write` call. + coerce_tests! :test_yaml_dump_and_load, :test_yaml_dump_and_load_with_gzip if /mswin|mingw/.match?(RbConfig::CONFIG["host_os"]) + + # Cast type in SQL Server is :varchar rather than Unicode :string. + coerce_tests! :test_yaml_load_8_0_dump_without_cast_type_still_get_the_right_one + def test_yaml_load_8_0_dump_without_cast_type_still_get_the_right_one + cache = load_bound_reflection(schema_dump_8_0_path) + + assert_no_queries do + columns = cache.columns_hash("courses") + assert_equal 3, columns.size + cast_type = columns["name"].fetch_cast_type(@connection) + assert_not_nil cast_type, "expected cast_type to be present" + assert_equal :varchar, cast_type.type + end + end + + private + + # We need to give the full paths for this to work. 
+ undef_method :schema_dump_5_1_path + def schema_dump_5_1_path + File.join(ARTest::SQLServer.root_activerecord, "test/assets/schema_dump_5_1.yml") + end + + undef_method :schema_dump_8_0_path + def schema_dump_8_0_path + File.join(ARTest::SQLServer.root_activerecord, "test/assets/schema_dump_8_0.yml") + end + end + end +end + +class UnsafeRawSqlTest < ActiveRecord::TestCase + fixtures :posts + + # Use LEN() instead of LENGTH() function. + coerce_tests! %r{order: always allows Arel} + test "order: always allows Arel" do + titles = Post.order(Arel.sql("len(title)")).pluck(:title) + + assert_not_empty titles + end + + # Use LEN() instead of LENGTH() function. + coerce_tests! %r{pluck: always allows Arel} + test "pluck: always allows Arel" do + excepted_values = Post.includes(:comments).pluck(:title).map { |title| [title, title.size] } + values = Post.includes(:comments).pluck(:title, Arel.sql("len(title)")) + + assert_equal excepted_values, values + end + + # Use LEN() instead of LENGTH() function. + coerce_tests! %r{order: allows valid Array arguments} + test "order: allows valid Array arguments" do + ids_expected = Post.order(Arel.sql("author_id, len(title)")).pluck(:id) + + ids = Post.order(["author_id", "len(title)"]).pluck(:id) + + assert_equal ids_expected, ids + end + + # Use LEN() instead of LENGTH() function. + coerce_tests! %r{order: allows nested functions} + test "order: allows nested functions" do + ids_expected = Post.order(Arel.sql("author_id, len(trim(title))")).pluck(:id) + + # $DEBUG = true + ids = Post.order("author_id, len(trim(title))").pluck(:id) + + assert_equal ids_expected, ids + end + + # Use LEN() instead of LENGTH() function. + coerce_tests! %r{pluck: allows nested functions} + test "pluck: allows nested functions" do + title_lengths_expected = Post.pluck(Arel.sql("len(trim(title))")) + + title_lengths = Post.pluck("len(trim(title))") + + assert_equal title_lengths_expected, title_lengths + end + + test "order: allows string column names that are quoted" do + ids_expected = Post.order(Arel.sql("id")).pluck(:id) + + ids = Post.order("[id]").pluck(:id) + + assert_equal ids_expected, ids + end + + test "order: allows string column names that are quoted with table" do + ids_expected = Post.order(Arel.sql("id")).pluck(:id) + + ids = Post.order("[posts].[id]").pluck(:id) + + assert_equal ids_expected, ids + end + + test "order: allows string column names that are quoted with table and user" do + ids_expected = Post.order(Arel.sql("id")).pluck(:id) + + ids = Post.order("[dbo].[posts].[id]").pluck(:id) + + assert_equal ids_expected, ids + end + + test "order: allows string column names that are quoted with table, user and database" do + ids_expected = Post.order(Arel.sql("id")).pluck(:id) + + ids = Post.order("[activerecord_unittest].[dbo].[posts].[id]").pluck(:id) + + assert_equal ids_expected, ids + end + + test "pluck: allows string column name that are quoted" do + titles_expected = Post.pluck(Arel.sql("title")) + + titles = Post.pluck("[title]") + + assert_equal titles_expected, titles + end + + test "pluck: allows string column name that are quoted with table" do + titles_expected = Post.pluck(Arel.sql("title")) + + titles = Post.pluck("[posts].[title]") + + assert_equal titles_expected, titles + end + + test "pluck: allows string column name that are quoted with table and user" do + titles_expected = Post.pluck(Arel.sql("title")) + + titles = Post.pluck("[dbo].[posts].[title]") + + assert_equal titles_expected, titles + end + + test "pluck: allows string column 
name that are quoted with table, user and database" do + titles_expected = Post.pluck(Arel.sql("title")) + + titles = Post.pluck("[activerecord_unittest].[dbo].[posts].[title]") + + assert_equal titles_expected, titles + end + + # Collation name should not be quoted. Hardcoded values for different adapters. + coerce_tests! %r{order: allows valid arguments with COLLATE} + test "order: allows valid arguments with COLLATE" do + collation_name = "Latin1_General_CS_AS_WS" + + ids_expected = Post.order(Arel.sql(%(author_id, title COLLATE #{collation_name} DESC))).pluck(:id) + + ids = Post.order(["author_id", %(title COLLATE #{collation_name} DESC)]).pluck(:id) + + assert_equal ids_expected, ids + end +end + +class ReservedWordTest < ActiveRecord::TestCase + coerce_tests! :test_change_columns + def test_change_columns_coerced + assert_nothing_raised { @connection.change_column_default(:group, :order, "whatever") } + assert_nothing_raised { @connection.change_column("group", "order", :text) } + assert_nothing_raised { @connection.change_column_null("group", "order", true) } + assert_nothing_raised { @connection.rename_column(:group, :order, :values) } + end +end + +class OptimisticLockingTest < ActiveRecord::TestCase + # We do not allow updating identities, but we can test using a non-identity key + coerce_tests! :test_update_with_dirty_primary_key + def test_update_with_dirty_primary_key_coerced + assert_raises(ActiveRecord::RecordNotUnique) do + record = StringKeyObject.find("record1") + record.id = "record2" + record.save! + end + + record = StringKeyObject.find("record1") + record.id = "record42" + record.save! + + assert StringKeyObject.find("record42") + assert_raises(ActiveRecord::RecordNotFound) do + StringKeyObject.find("record1") + end + end +end + +class RelationMergingTest < ActiveRecord::TestCase + # Use nvarchar string (N'') in assert + coerce_tests! :test_merging_with_order_with_binds + def test_merging_with_order_with_binds_coerced + relation = Post.all.merge(Post.order([Arel.sql("title LIKE ?"), "%suffix"])) + assert_equal ["title LIKE N'%suffix'"], relation.order_values + end + + # Same as original but change first regexp to match sp_executesql binding syntax + coerce_tests! :test_merge_doesnt_duplicate_same_clauses + def test_merge_doesnt_duplicate_same_clauses_coerced + david, mary, bob = authors(:david, :mary, :bob) + + non_mary_and_bob = Author.where.not(id: [mary, bob]) + + author_id = Author.lease_connection.quote_table_name("authors.id") + assert_queries_match(/WHERE #{Regexp.escape(author_id)} NOT IN \((@\d), \g<1>\)/) do + assert_equal [david], non_mary_and_bob.merge(non_mary_and_bob) + end + + only_david = Author.where("#{author_id} IN (?)", david) + + assert_queries_match(/WHERE \(#{Regexp.escape(author_id)} IN \(@\d\)\)/) do + assert_equal [david], only_david.merge(only_david) + end + end +end + +module ActiveRecord + class DatabaseTasksTruncateAllTest < ActiveRecord::TestCase + # SQL Server does not allow truncation of tables that are referenced by foreign key + # constraints. As this test truncates all tables we would need to remove all foreign + # key constraints and then restore them afterwards to get this test to pass. + coerce_tests! :test_truncate_tables + end +end + +class EnumTest < ActiveRecord::TestCase + # Need to remove index as SQL Server considers NULLs on a unique-index to be equal unlike PostgreSQL/MySQL/SQLite. + coerce_tests! 
%r{enums are distinct per class} + test "enums are distinct per class coerced" do + Book.lease_connection.remove_index(:books, column: [:author_id, :name]) + + send(:"original_enums are distinct per class") + ensure + Book.where(author_id: nil, name: nil).delete_all + Book.lease_connection.add_index(:books, [:author_id, :name], unique: true) + end + + # Need to remove index as SQL Server considers NULLs on a unique-index to be equal unlike PostgreSQL/MySQL/SQLite. + coerce_tests! %r{creating new objects with enum scopes} + test "creating new objects with enum scopes coerced" do + Book.lease_connection.remove_index(:books, column: [:author_id, :name]) + + send(:"original_creating new objects with enum scopes") + ensure + Book.where(author_id: nil, name: nil).delete_all + Book.lease_connection.add_index(:books, [:author_id, :name], unique: true) + end + + # Need to remove index as SQL Server considers NULLs on a unique-index to be equal unlike PostgreSQL/MySQL/SQLite. + coerce_tests! %r{enums are inheritable} + test "enums are inheritable coerced" do + Book.lease_connection.remove_index(:books, column: [:author_id, :name]) + + send(:"original_enums are inheritable") + ensure + Book.where(author_id: nil, name: nil).delete_all + Book.lease_connection.add_index(:books, [:author_id, :name], unique: true) + end + + # Need to remove index as SQL Server considers NULLs on a unique-index to be equal unlike PostgreSQL/MySQL/SQLite. + coerce_tests! %r{serializable\? with large number label} + test "serializable? with large number label coerced" do + Book.lease_connection.remove_index(:books, column: [:author_id, :name]) + + send(:"original_serializable\\? with large number label") + ensure + Book.where(author_id: nil, name: nil).delete_all + Book.lease_connection.add_index(:books, [:author_id, :name], unique: true) + end +end + +class EagerLoadingTooManyIdsTest < ActiveRecord::TestCase + fixtures :citations + + # Original Rails test fails with SQL Server error message "The query processor ran out of internal resources and + # could not produce a query plan". This error goes away if you change database compatibility level to 110 (SQL 2012) + # (see https://www.mssqltips.com/sqlservertip/5279/sql-server-error-query-processor-ran-out-of-internal-resources-and-could-not-produce-a-query-plan/). + # However, you cannot change the compatibility level during a test. The purpose of the test is to ensure that an + # unprepared statement is used if the number of values exceeds the adapter's `bind_params_length`. The coerced test + # still does this as there will be 32,768 remaining citation records in the database and the `bind_params_length` of + # adapter is 2,098. + coerce_tests! :test_eager_loading_too_many_ids + def test_eager_loading_too_many_ids_coerced + # Remove excess records. + Citation.limit(32768).order(id: :desc).delete_all + + # Perform test + citation_count = Citation.count + assert_queries_match(/WHERE \[citations\]\.\[id\] IN \(0, 1/) do + assert_equal citation_count, Citation.eager_load(:citations).offset(0).size + end + end +end + +class LogSubscriberTest < ActiveRecord::TestCase + # Call original test from coerced test. Fixes issue on CI with Rails installed as a gem. + coerce_tests! :test_verbose_query_logs + def test_verbose_query_logs_coerced + original_test_verbose_query_logs + end +end + +class ReloadModelsTest < ActiveRecord::TestCase + # Skip test on Windows. 
The number of arguments passed to `IO.popen` in + # `activesupport/lib/active_support/testing/isolation.rb` exceeds what Windows can handle. + coerce_tests! :test_has_one_with_reload if /mswin|mingw/.match?(RbConfig::CONFIG["host_os"]) +end + +class MarshalSerializationTest < ActiveRecord::TestCase + private + + undef_method :marshal_fixture_path + def marshal_fixture_path(file_name) + File.expand_path( + "support/marshal_compatibility_fixtures/#{ActiveRecord::Base.lease_connection.adapter_name}/#{file_name}.dump", + ARTest::SQLServer.test_root_sqlserver + ) + end +end + +class NestedThroughAssociationsTest < ActiveRecord::TestCase + # Same as original but replace order with "order(:id)" to ensure that assert_includes_and_joins_equal doesn't raise + # "A column has been specified more than once in the order by list" + # Example: original test generate queries like "ORDER BY authors.id, [authors].[id]". We don't support duplicate columns in the order list + coerce_tests! :test_has_many_through_has_many_with_has_many_through_habtm_source_reflection_preload_via_joins, :test_has_many_through_has_and_belongs_to_many_with_has_many_source_reflection_preload_via_joins + def test_has_many_through_has_many_with_has_many_through_habtm_source_reflection_preload_via_joins_coerced + # preload table schemas + Author.joins(:category_post_comments).first + + assert_includes_and_joins_equal( + Author.where("comments.id" => comments(:does_it_hurt).id).order(:id), + [authors(:david), authors(:mary)], :category_post_comments + ) + end + + def test_has_many_through_has_and_belongs_to_many_with_has_many_source_reflection_preload_via_joins_coerced + # preload table schemas + Category.joins(:post_comments).first + + assert_includes_and_joins_equal( + Category.where("comments.id" => comments(:more_greetings).id).order(:id), + [categories(:general), categories(:technology)], :post_comments + ) + end +end + +class PreloaderTest < ActiveRecord::TestCase + # Need to handle query parameters in SQL regex. + coerce_tests! :test_preloads_has_many_on_model_with_a_composite_primary_key_through_id_attribute + def test_preloads_has_many_on_model_with_a_composite_primary_key_through_id_attribute_coerced + order = cpk_orders(:cpk_groceries_order_2) + _shop_id, order_id = order.id + order_agreements = Cpk::OrderAgreement.where(order_id: order_id).to_a + + assert_not_empty order_agreements + assert_equal order_agreements.sort, order.order_agreements.sort + + loaded_order = nil + sql = capture_sql do + loaded_order = Cpk::Order.where(id: order_id).includes(:order_agreements).to_a.first + end + + assert_equal 2, sql.size + preload_sql = sql.last + + c = Cpk::OrderAgreement.lease_connection + order_id_column = Regexp.escape(c.quote_table_name("cpk_order_agreements.order_id")) + order_id_constraint = /#{order_id_column} = @0$/ + expectation = /SELECT.*WHERE.* #{order_id_constraint}/ + + assert_match(expectation, preload_sql) + assert_equal order_agreements.sort, loaded_order.order_agreements.sort + end + + # Need to handle query parameters in SQL regex. + coerce_tests! 
:test_preloads_belongs_to_a_composite_primary_key_model_through_id_attribute + def test_preloads_belongs_to_a_composite_primary_key_model_through_id_attribute_coerced + order_agreement = cpk_order_agreements(:order_agreement_three) + order = cpk_orders(:cpk_groceries_order_2) + assert_equal order, order_agreement.order + + loaded_order_agreement = nil + sql = capture_sql do + loaded_order_agreement = Cpk::OrderAgreement.where(id: order_agreement.id).includes(:order).to_a.first + end + + assert_equal 2, sql.size + preload_sql = sql.last + + c = Cpk::Order.lease_connection + order_id = Regexp.escape(c.quote_table_name("cpk_orders.id")) + order_constraint = /#{order_id} = @0$/ + expectation = /SELECT.*WHERE.* #{order_constraint}/ + + assert_match(expectation, preload_sql) + assert_equal order, loaded_order_agreement.order + end +end + +class MigratorTest < ActiveRecord::TestCase + # Test fails on Windows AppVeyor CI for unknown reason. + coerce_tests! :test_migrator_db_has_no_schema_migrations_table if /mswin|mingw/.match?(RbConfig::CONFIG["host_os"]) +end + +class MultiDbMigratorTest < ActiveRecord::TestCase + # Test fails on Windows AppVeyor CI for unknown reason. + coerce_tests! :test_migrator_db_has_no_schema_migrations_table if /mswin|mingw/.match?(RbConfig::CONFIG["host_os"]) +end + +class FieldOrderedValuesTest < ActiveRecord::TestCase + # Need to remove index as SQL Server considers NULLs on a unique-index to be equal unlike PostgreSQL/MySQL/SQLite. + coerce_tests! :test_in_order_of_with_enums_values + def test_in_order_of_with_enums_values_coerced + Book.lease_connection.remove_index(:books, column: [:author_id, :name]) + + original_test_in_order_of_with_enums_values + ensure + Book.where(author_id: nil, name: nil).delete_all + Book.lease_connection.add_index(:books, [:author_id, :name], unique: true) + end + + # Need to remove index as SQL Server considers NULLs on a unique-index to be equal unlike PostgreSQL/MySQL/SQLite. + coerce_tests! :test_in_order_of_with_string_column + def test_in_order_of_with_string_column_coerced + Book.lease_connection.remove_index(:books, column: [:author_id, :name]) + + original_test_in_order_of_with_string_column + ensure + Book.where(author_id: nil, name: nil).delete_all + Book.lease_connection.add_index(:books, [:author_id, :name], unique: true) + end + + # Need to remove index as SQL Server considers NULLs on a unique-index to be equal unlike PostgreSQL/MySQL/SQLite. + coerce_tests! :test_in_order_of_with_enums_keys + def test_in_order_of_with_enums_keys_coerced + Book.lease_connection.remove_index(:books, column: [:author_id, :name]) + + original_test_in_order_of_with_enums_keys + ensure + Book.where(author_id: nil, name: nil).delete_all + Book.lease_connection.add_index(:books, [:author_id, :name], unique: true) + end + + # Need to remove index as SQL Server considers NULLs on a unique-index to be equal unlike PostgreSQL/MySQL/SQLite. + coerce_tests! :test_in_order_of_with_nil + def test_in_order_of_with_nil_coerced + Book.lease_connection.remove_index(:books, column: [:author_id, :name]) + + original_test_in_order_of_with_nil + ensure + Book.where(author_id: nil, name: nil).delete_all + Book.lease_connection.add_index(:books, [:author_id, :name], unique: true) + end +end + +class QueryLogsTest < ActiveRecord::TestCase + # Invalid character encoding causes `ActiveRecord::StatementInvalid` error similar to Postgres. + coerce_tests! 
:test_invalid_encoding_query + def test_invalid_encoding_query_coerced + ActiveRecord::QueryLogs.tags = [:application] + assert_raises ActiveRecord::StatementInvalid do + ActiveRecord::Base.lease_connection.execute "select 1 as '\xFF'" + end + end +end + +class InsertAllTest < ActiveRecord::TestCase + # Same as original but using INSERTED.name as UPPER argument + coerce_tests! :test_insert_all_returns_requested_sql_fields + def test_insert_all_returns_requested_sql_fields_coerced + skip unless supports_insert_returning? + + result = Book.insert_all! [{name: "Rework", author_id: 1}], returning: Arel.sql("UPPER(INSERTED.name) as name") + assert_equal %w[REWORK], result.pluck("name") + end + + # Need to remove index as SQL Server considers NULLs on a unique-index to be equal unlike PostgreSQL/MySQL/SQLite. + coerce_tests! :test_insert_with_type_casting_and_serialize_is_consistent + def test_insert_with_type_casting_and_serialize_is_consistent_coerced + connection.remove_index(:books, column: [:author_id, :name]) + + original_test_insert_with_type_casting_and_serialize_is_consistent + ensure + Book.where(author_id: nil, name: '["Array"]').delete_all + Book.lease_connection.add_index(:books, [:author_id, :name], unique: true) + end + + # Same as original but using target.status for assignment and CASE instead of GREATEST for operator + coerce_tests! :test_upsert_all_updates_using_provided_sql + def test_upsert_all_updates_using_provided_sql_coerced + Book.upsert_all( + [{id: 1, status: 1}, {id: 2, status: 1}], + on_duplicate: Arel.sql(<<~SQL + target.status = CASE + WHEN target.status > 1 THEN target.status + ELSE 1 + END + SQL + ) + ) + + assert_equal "published", Book.find(1).status + assert_equal "written", Book.find(2).status + end +end + +module ActiveRecord + class Migration + class InvalidOptionsTest < ActiveRecord::TestCase + # Include the additional SQL Server migration options. + undef_method :invalid_add_column_option_exception_message + def invalid_add_column_option_exception_message(key) + default_keys = [":limit", ":precision", ":scale", ":default", ":null", ":collation", ":comment", ":primary_key", ":if_exists", ":if_not_exists"] + default_keys.concat([":is_identity", ":as", ":stored"]) # SQL Server additional valid keys + + "Unknown key: :#{key}. Valid keys are: #{default_keys.join(", ")}" + end + end + end +end + +# Need to use `install_unregistered_type_fallback` instead of `install_unregistered_type_error` so that message-pack +# can read and write `ActiveRecord::ConnectionAdapters::SQLServer::Type::Data` objects. +class ActiveRecordMessagePackTest < ActiveRecord::TestCase + private + + undef_method :serializer + def serializer + @serializer ||= ::MessagePack::Factory.new.tap do |factory| + ActiveRecord::MessagePack::Extensions.install(factory) + ActiveSupport::MessagePack::Extensions.install(factory) + ActiveSupport::MessagePack::Extensions.install_unregistered_type_fallback(factory) + end + end +end + +class StoreTest < ActiveRecord::TestCase + # Set the attribute as JSON type for the `StoreTest#saved changes tracking for accessors with json column` test. + Admin::User.attribute :json_options, ActiveRecord::Type::SQLServer::Json.new +end + +class TestDatabasesTest < ActiveRecord::TestCase + # Tests are not about a specific adapter. + coerce_all_tests! +end + +module ActiveRecord + module ConnectionAdapters + class ConnectionHandlersShardingDbTest < ActiveRecord::TestCase + # Tests are not about a specific adapter. + coerce_all_tests! 
+ end + end +end + +module ActiveRecord + module ConnectionAdapters + class ConnectionSwappingNestedTest < ActiveRecord::TestCase + # Tests are not about a specific adapter. + coerce_all_tests! + end + end +end + +module ActiveRecord + module ConnectionAdapters + class ConnectionHandlersMultiDbTest < ActiveRecord::TestCase + # Tests are not about a specific adapter. + coerce_tests! :test_switching_connections_via_handler + end + end +end + +module ActiveRecord + module ConnectionAdapters + class ConnectionHandlersMultiPoolConfigTest < ActiveRecord::TestCase + # Tests are not about a specific adapter. + coerce_all_tests! + end + end +end + +module ActiveRecord + class Migration + class CheckConstraintTest < ActiveRecord::TestCase + # SQL Server formats the check constraint expression differently. + coerce_tests! :test_check_constraints + def test_check_constraints_coerced + check_constraints = @connection.check_constraints("products") + assert_equal 1, check_constraints.size + + constraint = check_constraints.first + assert_equal "products", constraint.table_name + assert_equal "products_price_check", constraint.name + assert_equal "[price]>[discounted_price]", constraint.expression + end + + # SQL Server formats the check constraint expression differently. + coerce_tests! :test_add_check_constraint + def test_add_check_constraint_coerced + @connection.add_check_constraint :trades, "quantity > 0" + + check_constraints = @connection.check_constraints("trades") + assert_equal 1, check_constraints.size + + constraint = check_constraints.first + assert_equal "trades", constraint.table_name + assert_equal "chk_rails_2189e9f96c", constraint.name + assert_equal "[quantity]>(0)", constraint.expression + end + + # SQL Server formats the check constraint expression differently. + coerce_tests! :test_remove_check_constraint + def test_remove_check_constraint_coerced + @connection.add_check_constraint :trades, "price > 0", name: "price_check" + @connection.add_check_constraint :trades, "quantity > 0", name: "quantity_check" + + assert_equal 2, @connection.check_constraints("trades").size + @connection.remove_check_constraint :trades, name: "quantity_check" + assert_equal 1, @connection.check_constraints("trades").size + + constraint = @connection.check_constraints("trades").first + assert_equal "trades", constraint.table_name + assert_equal "price_check", constraint.name + assert_equal "[price]>(0)", constraint.expression + + @connection.remove_check_constraint :trades, name: :price_check # name as a symbol + assert_empty @connection.check_constraints("trades") + end + end + end +end + +module ActiveRecord + module ConnectionAdapters + class PoolConfig + class ResolverTest < ActiveRecord::TestCase + # SQL Server was not included in the list of available adapters in the error message. + coerce_tests! :test_url_invalid_adapter + def test_url_invalid_adapter_coerced + error = assert_raises(AdapterNotFound) do + Base.connection_handler.establish_connection "ridiculous://foo?encoding=utf8" + end + + assert_match "Database configuration specifies nonexistent 'ridiculous' adapter. Available adapters are: abstract, fake, mysql2, postgresql, sqlite3, sqlserver, trilogy. 
Ensure that the adapter is spelled correctly in config/database.yml and that you've added the necessary adapter gem to your Gemfile if it's not in the list of available adapters.", error.message + end + end + end + end +end + +module ActiveRecord + module ConnectionAdapters + class RegistrationIsolatedTest < ActiveRecord::TestCase + # SQL Server was not included in the list of available adapters in the error message. + coerce_tests! %r{resolve raises if the adapter is using the pre 7.2 adapter registration API} + def resolve_raises_if_the_adapter_is_using_the_pre_7_2_adapter_registration_API + exception = assert_raises(ActiveRecord::AdapterNotFound) do + ActiveRecord::ConnectionAdapters.resolve("fake_legacy") + end + + assert_equal( + "Database configuration specifies nonexistent 'fake_legacy' adapter. Available adapters are: abstract, fake, mysql2, postgresql, sqlite3, sqlserver, trilogy. Ensure that the adapter is spelled correctly in config/database.yml and that you've added the necessary adapter gem to your Gemfile if it's not in the list of available adapters.", + exception.message + ) + ensure + ActiveRecord::ConnectionAdapters.instance_variable_get(:@adapters).delete("fake_legacy") + end + end + end +end + +module ActiveRecord + class TableMetadataTest < ActiveSupport::TestCase + # Adapter returns an object that is subclass of what is expected in the original test. + coerce_tests! %r{#associated_table creates the right type caster for joined table with different association name} + def associated_table_creates_the_right_type_caster_for_joined_table_with_different_association_name_coerced + base_table_metadata = TableMetadata.new(AuditRequiredDeveloper, Arel::Table.new("developers")) + + associated_table_metadata = base_table_metadata.associated_table("audit_logs") + + assert associated_table_metadata.arel_table.type_for_attribute(:message).is_a?(ActiveRecord::Type::String) + end + end +end + +module ActiveRecord + module TypeCaster + class ConnectionTest < ActiveSupport::TestCase + # Adapter returns an object that is subclass of what is expected in the original test. + coerce_tests! %r{#type_for_attribute is not aware of custom types} + def type_for_attribute_is_not_aware_of_custom_types_coerced + type_caster = Connection.new(AttributedDeveloper, "developers") + + type = type_caster.type_for_attribute(:name) + + assert_not_equal DeveloperName, type.class + assert type.is_a?(ActiveRecord::Type::String) + end + end + end +end + +module ActiveRecord + module Assertions + class QueryAssertionsTest < ActiveSupport::TestCase + # Query slightly different in original test. + coerce_tests! :test_assert_queries_match + def test_assert_queries_match_coerced + assert_queries_match(/ASC OFFSET 0 ROWS FETCH NEXT @0 ROWS ONLY/i, count: 1) { Post.first } + assert_queries_match(/ASC OFFSET 0 ROWS FETCH NEXT @0 ROWS ONLY/i) { Post.first } + + error = assert_raises(Minitest::Assertion) { + assert_queries_match(/ASC OFFSET 0 ROWS FETCH NEXT @0 ROWS ONLY/i, count: 2) { Post.first } + } + assert_match(/1 instead of 2 queries/, error.message) + + error = assert_raises(Minitest::Assertion) { + assert_queries_match(/ASC OFFSET 0 ROWS FETCH NEXT @0 ROWS ONLY/i, count: 0) { Post.first } + } + assert_match(/1 instead of 0 queries/, error.message) + end + end + end +end + +module ActiveRecord + class WithTest < ActiveRecord::TestCase + # SQL contains just 'WITH' instead of 'WITH RECURSIVE' as expected by the original test. + coerce_tests!
:test_with_recursive + def test_with_recursive_coerced + top_companies = Company.where(firm_id: nil).to_a + child_companies = Company.where(firm_id: top_companies).to_a + top_companies_and_children = (top_companies.map(&:id) + child_companies.map(&:id)).sort + + relation = Company.with_recursive( + top_companies_and_children: [ + Company.where(firm_id: nil), + Company.joins("JOIN top_companies_and_children ON companies.firm_id = top_companies_and_children.id") + ] + ).from("top_companies_and_children AS companies") + + assert_equal top_companies_and_children, relation.order(:id).pluck(:id) + assert_match "WITH ", relation.to_sql + end + end +end + +module ActiveRecord + class AdapterConnectionTest < ActiveRecord::TestCase + # Original method only handled the core adapters. + undef_method :raw_transaction_open? + def raw_transaction_open?(connection) + transaction_count = connection.instance_variable_get(:@raw_connection).execute("SELECT @@TRANCOUNT AS TRANSACTION_COUNT").first["TRANSACTION_COUNT"] + transaction_count > 0 + rescue + false + end + end +end + +class EachTest < ActiveRecord::TestCase + # Match SQL Server limit implementation. + coerce_tests! :test_in_batches_executes_range_queries_when_unconstrained + def test_in_batches_executes_range_queries_when_unconstrained_coerced + quoted_posts_id = Regexp.escape(quote_table_name("posts.id")) + + relations = assert_queries_match(/ORDER BY #{quoted_posts_id} ASC OFFSET @\S ROWS FETCH NEXT @\S ROWS ONLY/i, count: 6) do + assert_queries_match(/ORDER BY #{quoted_posts_id} ASC OFFSET 0 ROWS FETCH NEXT @\S ROWS ONLY/i, count: 1) do + Post.in_batches(of: 2).to_a + end + end + + assert_queries_match(/WHERE #{quoted_posts_id} > .+ AND #{quoted_posts_id} <= .+/i) do + relations.each { |relation| assert_kind_of Post, relation.first } + end + end + + # Match SQL Server limit implementation. + coerce_tests! :test_in_batches_executes_in_queries_when_unconstrained_and_opted_out_of_ranges + def test_in_batches_executes_in_queries_when_unconstrained_and_opted_out_of_ranges_coerced + quoted_posts_id = Regexp.escape(quote_table_name("posts.id")) + + relations = assert_queries_match(/ORDER BY #{quoted_posts_id} ASC OFFSET 0 ROWS FETCH NEXT @\S ROWS ONLY/i, count: 6) do + Post.in_batches(of: 2, use_ranges: false).to_a + end + + assert_queries_match(/#{quoted_posts_id} IN \(.+\)/i) do + relations.each { |relation| assert_kind_of Post, relation.first } + end + end + + # Match SQL Server limit implementation. + coerce_tests! :test_in_batches_executes_in_queries_when_constrained + def test_in_batches_executes_in_queries_when_constrained_coerced + quoted_posts_id = Regexp.escape(quote_table_name("posts.id")) + + relations = assert_queries_match(/ORDER BY #{quoted_posts_id} ASC OFFSET 0 ROWS FETCH NEXT @\S ROWS ONLY/i, count: 3) do + Post.where("id < ?", 5).in_batches(of: 2).to_a + end + + assert_queries_match(/#{quoted_posts_id} IN \(.+\)/i) do + relations.each { |relation| assert_kind_of Post, relation.first } + end + end + + # Match SQL Server limit implementation. + coerce_tests! 
:test_in_batches_executes_range_queries_when_constrained_and_opted_in_into_ranges + def test_in_batches_executes_range_queries_when_constrained_and_opted_in_into_ranges_coerced + quoted_posts_id = Regexp.escape(quote_table_name("posts.id")) + + relations = assert_queries_match(/ORDER BY #{quoted_posts_id} ASC OFFSET @\S ROWS FETCH NEXT @\S ROWS ONLY/i, count: 3) do + assert_queries_match(/ORDER BY #{quoted_posts_id} ASC OFFSET 0 ROWS FETCH NEXT @\S ROWS ONLY/i, count: 1) do + Post.where("id < ?", 5).in_batches(of: 2, use_ranges: true).to_a + end + end + + assert_queries_match(/#{quoted_posts_id} > .+ AND #{quoted_posts_id} <= .+/i) do + relations.each { |relation| assert_kind_of Post, relation.first } + end + end + + # Match SQL Server SQL format. + coerce_tests! :test_in_batches_should_unscope_cursor_after_pluck + def test_in_batches_should_unscope_cursor_after_pluck_coerced + all_ids = Post.limit(2).pluck(:id) + found_ids = [] + # only a single clause on id (i.e. not 'id IN (?,?) AND id = ?', but only 'id = ?') + assert_queries_match(/WHERE #{Regexp.escape(quote_table_name("posts.id"))} = \S+ ORDER BY/) do + Post.where(id: all_ids).in_batches(of: 1) do |relation| + found_ids << relation.pick(:id) + end + end + assert_equal all_ids.sort, found_ids + end + + # Match SQL Server SQL format. + coerce_tests! :test_in_batches_loaded_should_unscope_cursor_after_pluck + def test_in_batches_loaded_should_unscope_cursor_after_pluck_coerced + all_ids = Post.limit(2).pluck(:id) + # only a single clause on id (i.e. not 'id IN (?,?) AND id = ?', but only 'id = ?') + assert_queries_match(/WHERE #{Regexp.escape(quote_table_name("posts.id"))} = \S+;/) do + Post.where(id: all_ids).in_batches(of: 1, load: true) do |relation| + relation.delete_all + end + end + end +end diff --git a/test/cases/column_test_sqlserver.rb b/test/cases/column_test_sqlserver.rb index 44d8fa7b5..e55937662 100644 --- a/test/cases/column_test_sqlserver.rb +++ b/test/cases/column_test_sqlserver.rb @@ -1,338 +1,862 @@ -require 'cases/sqlserver_helper' -require 'models/binary' - -class ColumnTestSqlserver < ActiveRecord::TestCase - - def setup - @connection = ActiveRecord::Base.connection - @column_klass = ActiveRecord::ConnectionAdapters::SQLServerColumn - end - - should 'return real_number as float' do - assert_equal :float, TableWithRealColumn.columns_hash["real_number"].type +# frozen_string_literal: true + +require "cases/helper_sqlserver" + +class ColumnTestSQLServer < ActiveRecord::TestCase + it "#table_name" do + assert SSTestDatatype.columns.all? { |c| c.table_name == "sst_datatypes" } + assert SSTestCustomersView.columns.all? 
{ |c| c.table_name == "customers" } end - - should 'know its #table_name and #table_klass' do - Topic.columns.each do |column| - assert_equal 'topics', column.table_name, "This column #{column.inspect} did not know it's #table_name" - assert_equal Topic, column.table_klass, "This column #{column.inspect} did not know it's #table_klass" + + describe "ActiveRecord::ConnectionAdapters::SQLServer::Type" do + let(:obj) { SSTestDatatype.new } + + def new_obj + SSTestDatatype.new end - end - - should 'return correct null, limit, and default for Topic' do - tch = Topic.columns_hash - assert_equal false, tch['id'].null - assert_equal true, tch['title'].null - assert_equal 255, tch['author_name'].limit - assert_equal true, tch['approved'].default - assert_equal 0, tch['replies_count'].default - end - - context 'For binary columns' do - setup do - @binary_string = "GIF89a\001\000\001\000\200\000\000\377\377\377\000\000\000!\371\004\000\000\000\000\000,\000\000\000\000\001\000\001\000\000\002\002D\001\000;" - @saved_bdata = Binary.create!(:data => @binary_string) + def column(name) + SSTestDatatype.columns_hash[name] end - - should 'read and write binary data equally' do - assert_equal @binary_string, Binary.find(@saved_bdata).data + + def assert_obj_set_and_save(attribute, value) + obj.send :"#{attribute}=", value + _(obj.send(attribute)).must_equal value + obj.save! + _(obj.reload.send(attribute)).must_equal value end - - should 'have correct attributes' do - column = Binary.columns_hash['data'] - assert_equal :binary, column.type - assert_equal @connection.native_binary_database_type, column.sql_type - assert_equal nil, column.limit + + # http://msdn.microsoft.com/en-us/library/ms187752.aspx + + # Exact Numerics + + it "int(4) PRIMARY KEY" do + col = column("id") + _(col.sql_type).must_equal "int(4)" + _(col.null).must_equal false end - - should 'quote data for sqlserver with literal 0x prefix' do - # See the output of the stored procedure: 'exec sp_datatype_info' - sqlserver_encoded_bdata = "0x47494638396101000100800000ffffff00000021f90400000000002c00000000010001000002024401003b" - assert_equal sqlserver_encoded_bdata, @column_klass.string_to_binary(@binary_string) + + it "bigint(8)" do + col = column("bigint") + _(col.sql_type).must_equal "bigint(8)" + _(col.type).must_equal :integer + _(col.null).must_equal true + _(col.default).must_equal 42 + _(obj.bigint).must_equal 42 + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::BigInteger + _(type.limit).must_equal 8 + assert_obj_set_and_save :bigint, -9_223_372_036_854_775_808 + assert_obj_set_and_save :bigint, 9_223_372_036_854_775_807 end - end - - context 'For string columns' do + it "int(4)" do + col = column("int") + _(col.sql_type).must_equal "int(4)" + _(col.type).must_equal :integer + _(col.null).must_equal true + _(col.default).must_equal 42 + _(obj.int).must_equal 42 + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::Integer + _(type.limit).must_equal 4 + assert_obj_set_and_save :int, -2_147_483_648 + assert_obj_set_and_save :int, 2_147_483_647 + end - setup do - @char = SqlServerString.columns_hash['char'] - @char10 = SqlServerString.columns_hash['char_10'] - @varcharmax = SqlServerString.columns_hash['varchar_max'] - @varcharmax10 = SqlServerString.columns_hash['varchar_max_10'] + it "smallint(2)" do + col = 
column("smallint") + _(col.sql_type).must_equal "smallint(2)" + _(col.type).must_equal :integer + _(col.null).must_equal true + _(col.default).must_equal 42 + _(obj.smallint).must_equal 42 + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::SmallInteger + _(type.limit).must_equal 2 + assert_obj_set_and_save :smallint, -32_768 + assert_obj_set_and_save :smallint, 32_767 end - should 'have correct simplified types' do - assert_equal :string, @char.type - assert_equal :string, @char10.type - assert_equal :text, @varcharmax.type, @varcharmax.inspect - assert_equal :text, @varcharmax10.type, @varcharmax10.inspect + it "tinyint(1)" do + col = column("tinyint") + _(col.sql_type).must_equal "tinyint(1)" + _(col.type).must_equal :integer + _(col.null).must_equal true + _(col.default).must_equal 42 + _(obj.tinyint).must_equal 42 + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::TinyInteger + _(type.limit).must_equal 1 + assert_obj_set_and_save :tinyint, 0 + assert_obj_set_and_save :tinyint, 255 end - - should 'have correct #sql_type per schema definition' do - assert_equal 'char(1)', @char.sql_type, 'Specifing a char type with no limit is 1 by SQL Server standards.' - assert_equal 'char(10)', @char10.sql_type, @char10.inspect - assert_equal 'varchar(max)', @varcharmax.sql_type, 'A -1 limit should be converted to max (max) type.' - assert_equal 'varchar(max)', @varcharmax10.sql_type, 'A -1 limit should be converted to max (max) type.' + + it "bit" do + col = column("bit") + _(col.sql_type).must_equal "bit" + _(col.type).must_equal :boolean + _(col.null).must_equal true + _(col.default).must_equal true + _(obj.bit).must_equal true + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::Boolean + _(type.limit).must_be_nil + obj.bit = 0 + _(obj.bit).must_equal false + obj.save! + _(obj.reload.bit).must_equal false + obj.bit = "1" + _(obj.bit).must_equal true + obj.save! + _(obj.reload.bit).must_equal true end - - should 'have correct #limit per schema definition' do - assert_equal 1, @char.limit - assert_equal 10, @char10.limit - assert_equal nil, @varcharmax.limit, 'Limits on max types are moot and we should let rails know that.' - assert_equal nil, @varcharmax10.limit, 'Limits on max types are moot and we should let rails know that.' + + it "decimal(9,2)" do + col = column("decimal_9_2") + _(col.sql_type).must_equal "decimal(9,2)" + _(col.type).must_equal :decimal + _(col.null).must_equal true + _(col.default).must_equal BigDecimal("12345.01") + _(obj.decimal_9_2).must_equal BigDecimal("12345.01") + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::Decimal + _(type.limit).must_be_nil + _(type.precision).must_equal 9 + _(type.scale).must_equal 2 + obj.decimal_9_2 = "1234567.8901" + _(obj.decimal_9_2).must_equal BigDecimal("1234567.89") + obj.save! 
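+      # decimal(9,2) keeps two digits of scale, so "1234567.8901" is cast to 1234567.89 on assignment and round-trips unchanged below.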
+ _(obj.reload.decimal_9_2).must_equal BigDecimal("1234567.89") end - end - - context 'For all national/unicode columns' do - - setup do - @nchar = SqlServerUnicode.columns_hash['nchar'] - @nvarchar = SqlServerUnicode.columns_hash['nvarchar'] - @ntext = SqlServerUnicode.columns_hash['ntext'] - @ntext10 = SqlServerUnicode.columns_hash['ntext_10'] - @nchar10 = SqlServerUnicode.columns_hash['nchar_10'] - @nvarchar100 = SqlServerUnicode.columns_hash['nvarchar_100'] - @nvarcharmax = SqlServerUnicode.columns_hash['nvarchar_max'] - @nvarcharmax10 = SqlServerUnicode.columns_hash['nvarchar_max_10'] - end - - should 'all respond true to #is_utf8?' do - SqlServerUnicode.columns_hash.except('id').values.each do |column| - assert column.is_utf8?, "This column #{column.inspect} should have been a unicode column." - end - end - - should 'have correct simplified types' do - assert_equal :string, @nchar.type - assert_equal :string, @nvarchar.type - assert_equal :text, @ntext.type - assert_equal :text, @ntext10.type - assert_equal :string, @nchar10.type - assert_equal :string, @nvarchar100.type - assert_equal :text, @nvarcharmax.type, @nvarcharmax.inspect - assert_equal :text, @nvarcharmax10.type, @nvarcharmax10.inspect - end - - should 'have correct #sql_type per schema definition' do - assert_equal 'nchar(1)', @nchar.sql_type, 'Specifing a nchar type with no limit is 1 by SQL Server standards.' - assert_equal 'nvarchar(255)', @nvarchar.sql_type, 'Default nvarchar limit is 255.' - assert_equal 'ntext', @ntext.sql_type, 'Nice and clean ntext, limit means nothing here.' - assert_equal 'ntext', @ntext10.sql_type, 'Even a next with a limit of 10 specified will mean nothing.' - assert_equal 'nchar(10)', @nchar10.sql_type, 'An nchar with a limit of 10 needs to have it show up here.' - assert_equal 'nvarchar(100)', @nvarchar100.sql_type, 'An nvarchar with a specified limit of 100 needs to show it.' - assert_equal 'nvarchar(max)', @nvarcharmax.sql_type, 'A -1 limit should be converted to max (max) type.' - assert_equal 'nvarchar(max)', @nvarcharmax10.sql_type, 'A -1 limit should be converted to max (max) type.' - end - - should 'have correct #limit per schema definition' do - assert_equal 1, @nchar.limit - assert_equal 255, @nvarchar.limit - assert_equal nil, @ntext.limit, 'An ntext column limit is moot, it is a fixed variable length' - assert_equal 10, @nchar10.limit - assert_equal 100, @nvarchar100.limit - assert_equal nil, @nvarcharmax.limit, 'Limits on max types are moot and we should let rails know that.' - assert_equal nil, @nvarcharmax10.limit, 'Limits on max types are moot and we should let rails know that.' - end - - end - - context 'For datetime columns' do - - setup do - @date = SqlServerChronic.columns_hash['date'] - @time = SqlServerChronic.columns_hash['time'] - @datetime = SqlServerChronic.columns_hash['datetime'] - @smalldatetime = SqlServerChronic.columns_hash['smalldatetime'] - @timestamp = SqlServerChronic.columns_hash['timestamp'] - @ss_timestamp = SqlServerChronic.columns_hash['ss_timestamp'] - end - - should 'have correct simplified type for uncast datetime' do - assert_equal :datetime, @datetime.type - end - - should 'use correct #sql_type for different sql server versions' do - assert_equal 'datetime', @datetime.sql_type - if sqlserver_2005? 
- assert_equal 'datetime', @date.sql_type - assert_equal 'datetime', @time.sql_type - else - assert_equal 'date', @date.sql_type - assert_equal 'time', @time.sql_type - end - end - - should 'all be have nil #limit' do - assert_equal nil, @date.limit - assert_equal nil, @time.limit - assert_equal nil, @datetime.limit - end - - context 'with timestamps' do - - should 'use datetime sql type when using :timestamp in schema statements' do - assert_equal :datetime, @timestamp.type - assert_equal 'datetime', @timestamp.sql_type - end - - should 'be able to use real sql server timestamp if you really want to' do - assert_equal :binary, @ss_timestamp.type - assert_equal 'timestamp', @ss_timestamp.sql_type - end unless sqlserver_azure? - - should 'return :timestamp as a binaryish string' do - chronic = SqlServerChronic.create!.reload - assert_match %r|\000|, chronic.ss_timestamp - end unless sqlserver_azure? - - end - - context 'For smalldatetime types' do - - should 'have created that type using rails migrations' do - assert_equal 'smalldatetime', @smalldatetime.sql_type - end - - should 'be able to insert column without truncation warnings or the like' do - SqlServerChronic.create! :smalldatetime => Time.now - end - - should 'be able to update column without truncation warnings or the like' do - ssc = SqlServerChronic.create! :smalldatetime => 2.days.ago - ssc.update_attributes! :smalldatetime => Time.now - end - - end - - context 'which have coerced types' do - - setup do - christmas_08 = "2008-12-25".to_time - christmas_08_afternoon = "2008-12-25 12:00".to_time - @chronic_date = SqlServerChronic.create!(:date => christmas_08).reload - @chronic_time = SqlServerChronic.create!(:time => christmas_08_afternoon).reload - end - - should 'have an inheritable attribute ' do - assert SqlServerChronic.coerced_sqlserver_date_columns.include?('date') unless sqlserver_2008? - end - - should 'have column and objects cast to date' do - assert_equal :date, @date.type, "This column: \n#{@date.inspect}" - assert_instance_of Date, @chronic_date.date - end - - should 'have column objects cast to time' do - assert_equal :time, @time.type, "This column: \n#{@time.inspect}" - assert_instance_of Time, @chronic_time.time - end - + it "decimal(16,4)" do + col = column("decimal_16_4") + _(col.sql_type).must_equal "decimal(16,4)" + _(col.default).must_equal BigDecimal("1234567.89") + _(obj.decimal_16_4).must_equal BigDecimal("1234567.89") + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type.precision).must_equal 16 + _(type.scale).must_equal 4 + obj.decimal_16_4 = "1234567.8901001" + _(obj.decimal_16_4).must_equal BigDecimal("1234567.8901") + obj.save! 
+ _(obj.reload.decimal_16_4).must_equal BigDecimal("1234567.8901") end - end - - context 'For decimal and numeric columns' do - - setup do - @bank_balance = NumericData.columns_hash['bank_balance'] - @big_bank_balance = NumericData.columns_hash['big_bank_balance'] - @world_population = NumericData.columns_hash['world_population'] - @my_house_population = NumericData.columns_hash['my_house_population'] - end - - should 'have correct simplified types' do - assert_equal :decimal, @bank_balance.type - assert_equal :decimal, @big_bank_balance.type - assert_equal :integer, @world_population.type, 'Since #extract_scale == 0' - assert_equal :integer, @my_house_population.type, 'Since #extract_scale == 0' - end - - should 'have correct #sql_type' do - assert_equal 'decimal(10,2)', @bank_balance.sql_type - assert_equal 'decimal(15,2)', @big_bank_balance.sql_type - assert_equal 'decimal(10,0)', @world_population.sql_type - assert_equal 'decimal(2,0)', @my_house_population.sql_type - end - - should 'have correct #limit' do - assert_equal nil, @bank_balance.limit - assert_equal nil, @big_bank_balance.limit - assert_equal nil, @world_population.limit - assert_equal nil, @my_house_population.limit - end - - should 'return correct precisions and scales' do - assert_equal [10,2], [@bank_balance.precision, @bank_balance.scale] - assert_equal [15,2], [@big_bank_balance.precision, @big_bank_balance.scale] - assert_equal [10,0], [@world_population.precision, @world_population.scale] - assert_equal [2,0], [@my_house_population.precision, @my_house_population.scale] - end - - end - - context 'For float columns' do - # NOTE: float limits are adjusted to 24 or 53 by the database as per - # http://msdn.microsoft.com/en-us/library/ms173773.aspx - # NOTE: floats with a limit of <= 24 are reduced to reals by sqlserver on creation + it "numeric(18,0)" do + col = column("numeric_18_0") + _(col.sql_type).must_equal "numeric(18,0)" + _(col.type).must_equal :decimal + _(col.null).must_equal true + _(col.default).must_equal BigDecimal(191) + _(obj.numeric_18_0).must_equal BigDecimal(191) + _(col.default_function).must_be_nil + + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::DecimalWithoutScale + _(type.limit).must_be_nil + _(type.precision).must_equal 18 + _(type.scale).must_be_nil + + obj.numeric_18_0 = "192.1" + _(obj.numeric_18_0).must_equal BigDecimal(192) - setup do - @temperature = FloatData.columns_hash['temperature'] - @freezing = FloatData.columns_hash['temperature_8'] - @mild = FloatData.columns_hash['temperature_24'] - @beach = FloatData.columns_hash['temperature_32'] - @desert = FloatData.columns_hash['temperature_53'] + obj.save! 
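+      # numeric(18,0) has no scale (DecimalWithoutScale above), so the fractional part of "192.1" is discarded and plain 192 round-trips below.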
+ _(obj.reload.numeric_18_0).must_equal BigDecimal(192) end - should 'have correct simplified types' do - assert_equal :float, @temperature.type - assert_equal :float, @freezing.type - assert_equal :float, @mild.type - assert_equal :float, @beach.type - assert_equal :float, @desert.type + it "numeric(36,2)" do + col = column("numeric_36_2") + _(col.sql_type).must_equal "numeric(36,2)" + _(col.type).must_equal :decimal + _(col.null).must_equal true + _(col.default).must_equal BigDecimal("12345678901234567890.01") + _(obj.numeric_36_2).must_equal BigDecimal("12345678901234567890.01") + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::Decimal + _(type.limit).must_be_nil + _(type.precision).must_equal 36 + _(type.scale).must_equal 2 + obj.numeric_36_2 = "192.123" + _(obj.numeric_36_2).must_equal BigDecimal("192.12") + obj.save! + _(obj.reload.numeric_36_2).must_equal BigDecimal("192.12") end - should 'have correct #sql_type' do - assert_equal 'real(24)', @temperature.sql_type - assert_equal 'real(24)', @freezing.sql_type - assert_equal 'real(24)', @mild.sql_type - assert_equal 'float(53)', @beach.sql_type - assert_equal 'float(53)', @desert.sql_type + it "money" do + col = column("money") + _(col.sql_type).must_equal "money" + _(col.type).must_equal :money + _(col.null).must_equal true + _(col.default).must_equal BigDecimal("4.20") + _(obj.money).must_equal BigDecimal("4.20") + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::Money + _(type.limit).must_be_nil + _(type.precision).must_equal 19 + _(type.scale).must_equal 4 + obj.money = "922337203685477.58061" + _(obj.money).must_equal BigDecimal("922337203685477.5806") + obj.save! + _(obj.reload.money).must_equal BigDecimal("922337203685477.5806") end - should 'have correct #limit' do - assert_equal 24, @temperature.limit - assert_equal 24, @freezing.limit - assert_equal 24, @mild.limit - assert_equal 53, @beach.limit - assert_equal 53, @desert.limit + it "smallmoney" do + col = column("smallmoney") + _(col.sql_type).must_equal "smallmoney" + _(col.type).must_equal :smallmoney + _(col.null).must_equal true + _(col.default).must_equal BigDecimal("4.20") + _(obj.smallmoney).must_equal BigDecimal("4.20") + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::SmallMoney + _(type.limit).must_be_nil + _(type.precision).must_equal 10 + _(type.scale).must_equal 4 + obj.smallmoney = "214748.36461" + _(obj.smallmoney).must_equal BigDecimal("214748.3646") + obj.save! + _(obj.reload.smallmoney).must_equal BigDecimal("214748.3646") end - should 'return nil precisions and scales' do - assert_equal [nil,nil], [@temperature.precision, @temperature.scale] - assert_equal [nil,nil], [@freezing.precision, @freezing.scale] - assert_equal [nil,nil], [@mild.precision, @mild.scale] - assert_equal [nil,nil], [@beach.precision, @beach.scale] - assert_equal [nil,nil], [@desert.precision, @desert.scale] + # Approximate Numerics + # Float limits are adjusted to 24 or 53 by the database as per http://msdn.microsoft.com/en-us/library/ms173773.aspx + # Floats with a limit of <= 24 are reduced to reals by sqlserver on creation. 
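+    # Illustrative only (not part of the schema these tests run against): a migration column declared
+    # as `t.float :temperature, limit: 24` ends up stored as real, while `limit: 53` (or no limit) stays float.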
+ + it "float" do + col = column("float") + _(col.sql_type).must_equal "float" + _(col.type).must_equal :float + _(col.null).must_equal true + _(col.default).must_equal 123.00000001 + _(obj.float).must_equal 123.00000001 + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::Float + _(type.limit).must_be_nil + _(type.precision).must_be_nil + _(type.scale).must_be_nil + obj.float = "214748.36461" + _(obj.float).must_equal 214748.36461 + obj.save! + _(obj.reload.float).must_equal 214748.36461 end - end - - context 'For tinyint columns' do + it "real" do + col = column("real") + _(col.sql_type).must_equal "real" + _(col.type).must_equal :real + _(col.null).must_equal true + _(col.default).must_be_close_to 123.45, 0.01 + _(obj.real).must_be_close_to 123.45, 0.01 + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::Real + _(type.limit).must_be_nil + _(type.precision).must_be_nil + _(type.scale).must_be_nil + obj.real = "214748.36461" + _(obj.real).must_be_close_to 214748.36461, 0.01 + obj.save! + _(obj.reload.real).must_be_close_to 214748.36461, 0.01 + end + + # Date and Time + + it "date" do + col = column("date") + _(col.sql_type).must_equal "date" + _(col.type).must_equal :date + _(col.null).must_equal true + _(col.default).must_equal Date.civil(1, 1, 1) + _(obj.date).must_equal Date.civil(1, 1, 1) + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::Date + _(type.limit).must_be_nil + _(type.precision).must_be_nil + _(type.scale).must_be_nil + # Can cast strings. SQL Server format. + obj.date = "04-01-0001" + _(obj.date).must_equal Date.civil(1, 4, 1) + obj.save! + _(obj.date).must_equal Date.civil(1, 4, 1) + obj.reload + _(obj.date).must_equal Date.civil(1, 4, 1) + # Can cast strings. ISO format. + obj.date = "0001-04-01" + _(obj.date).must_equal Date.civil(1, 4, 1) + obj.save! + _(obj.date).must_equal Date.civil(1, 4, 1) + obj.reload + _(obj.date).must_equal Date.civil(1, 4, 1) + # Can filter by date range + _(obj).must_equal obj.class.where(date: obj.date..Date::Infinity.new).first + # Can keep and return assigned date. + assert_obj_set_and_save :date, Date.civil(1972, 4, 14) + # Can accept and cast time objects. + obj.date = Time.utc(2010, 4, 14, 12, 34, 56, 3000) + _(obj.date).must_equal Date.civil(2010, 4, 14) + obj.save! + _(obj.reload.date).must_equal Date.civil(2010, 4, 14) + end + + it "datetime" do + col = column("datetime") + _(col.sql_type).must_equal "datetime" + _(col.type).must_equal :datetime + _(col.null).must_equal true + time = Time.utc 1753, 1, 1, 0, 0, 0, 123000 + _(col.default).must_equal time, "Microseconds were <#{col.default.usec}> vs <123000>" + _(obj.datetime).must_equal time, "Microseconds were <#{obj.datetime.usec}> vs <123000>" + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::DateTime + _(type.limit).must_be_nil + _(type.precision).must_be_nil + _(type.scale).must_be_nil + obj.save! + _(obj).must_equal obj.class.where(datetime: time).first + # Can save to proper accuracy and return again. 
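+      # SQL Server's datetime type is only accurate to about 1/300 of a second, which is why 234567 microseconds round to 233000 further down.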
+ time = Time.utc 2010, 4, 1, 12, 34, 56, 3000 + obj.datetime = time + _(obj.datetime).must_equal time, "Microseconds were <#{obj.datetime.usec}> vs <3000>" + obj.save! + _(obj.datetime).must_equal time, "Microseconds were <#{obj.datetime.usec}> vs <3000>" + obj.reload + _(obj.datetime).must_equal time, "Microseconds were <#{obj.datetime.usec}> vs <3000>" + _(obj).must_equal obj.class.where(datetime: time).first + # Can filter by datetime range + _(obj).must_equal obj.class.where(datetime: time..DateTime::Infinity.new).first + # Will cast to true DB value on attribute write, save and return again. + time = Time.utc 2010, 4, 1, 12, 34, 56, 234567 + time2 = Time.utc 2010, 4, 1, 12, 34, 56, 233000 + obj.datetime = time + _(obj.datetime).must_equal time2, "Microseconds were <#{obj.datetime.usec}> vs <233000>" + obj.save! + _(obj.datetime).must_equal time2, "Microseconds were <#{obj.datetime.usec}> vs <233000>" + obj.reload + _(obj.datetime).must_equal time2, "Microseconds were <#{obj.datetime.usec}> vs <233000>" + _(obj).must_equal obj.class.where(datetime: time).first + _(obj).must_equal obj.class.where(datetime: time2).first + # Set and find nil. + obj.datetime = nil + _(obj.datetime).must_be_nil + obj.save! + _(obj.datetime).must_be_nil + _(obj).must_equal obj.class.where(datetime: nil).first + end + + it "datetime2" do + col = column("datetime2_7") + _(col.sql_type).must_equal "datetime2(7)" + _(col.type).must_equal :datetime + _(col.null).must_equal true + time = Time.utc 9999, 12, 31, 23, 59, 59, Rational(999999900, 1000) + _(col.default).must_equal time, "Nanoseconds were <#{col.default.nsec}> vs <999999900>" + _(obj.datetime2_7).must_equal time, "Nanoseconds were <#{obj.datetime2_7.nsec}> vs <999999900>" + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::DateTime2 + _(type.limit).must_be_nil + _(type.precision).must_equal 7 + _(type.scale).must_be_nil + obj.save! + _(obj).must_equal obj.class.where(datetime2_7: time).first + # Can save 100 nanosecond precisions and return again. + time = Time.utc 9999, 12, 31, 23, 59, 59, Rational(123456755, 1000) + time2 = Time.utc 9999, 12, 31, 23, 59, 59, Rational(123456800, 1000) + obj.datetime2_7 = time + _(obj.datetime2_7).must_equal time2, "Nanoseconds were <#{obj.datetime2_7.nsec}> vs <123456800>" + obj.save! + _(obj.datetime2_7).must_equal time2, "Nanoseconds were <#{obj.datetime2_7.nsec}> vs <123456800>" + obj.reload + _(obj.datetime2_7).must_equal time2, "Nanoseconds were <#{obj.datetime2_7.nsec}> vs <123456800>" + _(obj).must_equal obj.class.where(datetime2_7: time).first + _(obj).must_equal obj.class.where(datetime2_7: time2).first + # Can save small fraction nanosecond precisions and return again. + time = Time.utc 2008, 6, 21, 13, 30, 0, Rational(15020, 1000) + time2 = Time.utc 2008, 6, 21, 13, 30, 0, Rational(15000, 1000) + obj.datetime2_7 = time + _(obj.datetime2_7).must_equal time2, "Nanoseconds were <#{obj.datetime2_7.nsec}> vs <15000>" + obj.save! 
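+      # datetime2(7) stores values in 100-nanosecond increments, so 15020ns rounds to 15000ns (time2) on assignment and after the reload below.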
+ _(obj.reload.datetime2_7).must_equal time2, "Nanoseconds were <#{obj.datetime2_7.nsec}> vs <15000>" + _(obj).must_equal obj.class.where(datetime2_7: time).first + _(obj).must_equal obj.class.where(datetime2_7: time2).first + # datetime2_3 + time = Time.utc 9999, 12, 31, 23, 59, 59, Rational(123456789, 1000) + col = column("datetime2_3") + _(col.fetch_cast_type(connection).precision).must_equal 3 + obj.datetime2_3 = time + _(obj.datetime2_3).must_equal time.change(nsec: 123000000), "Nanoseconds were <#{obj.datetime2_3.nsec}> vs <123000000>" + obj.save! + obj.reload + _(obj.datetime2_3).must_equal time.change(nsec: 123000000), "Nanoseconds were <#{obj.datetime2_3.nsec}> vs <123000000>" + _(obj).must_equal obj.class.where(datetime2_3: time).first + # datetime2_1 + col = column("datetime2_1") + _(col.fetch_cast_type(connection).precision).must_equal 1 + obj.datetime2_1 = time + _(obj.datetime2_1).must_equal time.change(nsec: 100000000), "Nanoseconds were <#{obj.datetime2_1.nsec}> vs <100000000>" + obj.save! + obj.reload + _(obj.datetime2_1).must_equal time.change(nsec: 100000000), "Nanoseconds were <#{obj.datetime2_1.nsec}> vs <100000000>" + _(obj).must_equal obj.class.where(datetime2_1: time).first + # datetime2_0 + col = column("datetime2_0") + _(col.fetch_cast_type(connection).precision).must_equal 0 + time = Time.utc 2016, 4, 19, 16, 45, 40, 771036 + obj.datetime2_0 = time + _(obj.datetime2_0).must_equal time.change(nsec: 0), "Nanoseconds were <#{obj.datetime2_0.nsec}> vs <0>" + obj.save! + obj.reload + _(obj.datetime2_0).must_equal time.change(nsec: 0), "Nanoseconds were <#{obj.datetime2_0.nsec}> vs <0>" + _(obj).must_equal obj.class.where(datetime2_0: time).first + end + + it "datetimeoffset" do + col = column("datetimeoffset_7") + _(col.sql_type).must_equal "datetimeoffset(7)" + _(col.type).must_equal :datetimeoffset + _(col.null).must_equal true + _(col.default).must_equal Time.new(1984, 1, 24, 4, 20, 0, -28800).change(nsec: 123456700), "Nanoseconds <#{col.default.nsec}> vs <123456700>" + _(obj.datetimeoffset_7).must_equal Time.new(1984, 1, 24, 4, 20, 0, -28800).change(nsec: 123456700), "Nanoseconds were <#{obj.datetimeoffset_7.nsec}> vs <999999900>" + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::DateTimeOffset + _(type.limit).must_be_nil + _(type.precision).must_equal 7 + _(type.scale).must_be_nil + + # Can save 100 nanosecond precisions and return again. + obj.datetimeoffset_7 = Time.new(2010, 4, 1, 12, 34, 56, +18000).change(nsec: 123456755) + _(obj.datetimeoffset_7).must_equal Time.new(2010, 4, 1, 12, 34, 56, +18000).change(nsec: 123456800), "Nanoseconds were <#{obj.datetimeoffset_7.nsec}> vs <123456800>" + obj.save! + _(obj.datetimeoffset_7).must_equal Time.new(2010, 4, 1, 12, 34, 56, +18000).change(nsec: 123456800), "Nanoseconds were <#{obj.datetimeoffset_7.nsec}> vs <123456800>" + obj.reload + _(obj.datetimeoffset_7).must_equal Time.new(2010, 4, 1, 12, 34, 56, +18000).change(nsec: 123456800), "Nanoseconds were <#{obj.datetimeoffset_7.nsec}> vs <123456800>" + + # Maintains the timezone + time = ActiveSupport::TimeZone["America/Los_Angeles"].local 2010, 12, 31, 23, 59, 59, Rational(123456800, 1000) + obj.datetimeoffset_7 = time + _(obj.datetimeoffset_7).must_equal time + obj.save! + _(obj.datetimeoffset_7).must_equal time + _(obj.reload.datetimeoffset_7).must_equal time + + # With other precisions. 
+ time = ActiveSupport::TimeZone["America/Los_Angeles"].local 2010, 12, 31, 23, 59, 59, Rational(123456755, 1000) + col = column("datetimeoffset_3") + _(col.fetch_cast_type(connection).precision).must_equal 3 + obj.datetimeoffset_3 = time + _(obj.datetimeoffset_3).must_equal time.change(nsec: 123000000), "Nanoseconds were <#{obj.datetimeoffset_3.nsec}> vs <123000000>" + obj.save! + _(obj.datetimeoffset_3).must_equal time.change(nsec: 123000000), "Nanoseconds were <#{obj.datetimeoffset_3.nsec}> vs <123000000>" + col = column("datetime2_1") + _(col.fetch_cast_type(connection).precision).must_equal 1 + obj.datetime2_1 = time + _(obj.datetime2_1).must_equal time.change(nsec: 100000000), "Nanoseconds were <#{obj.datetime2_1.nsec}> vs <100000000>" + obj.save! + _(obj.datetime2_1).must_equal time.change(nsec: 100000000), "Nanoseconds were <#{obj.datetime2_1.nsec}> vs <100000000>" + end - setup do - @tinyint = SqlServerEdgeSchema.columns_hash['tinyint'] + it "smalldatetime" do + col = column("smalldatetime") + _(col.sql_type).must_equal "smalldatetime" + _(col.type).must_equal :smalldatetime + _(col.null).must_equal true + _(col.default).must_equal Time.utc(1901, 1, 1, 15, 45, 0, 0) + _(obj.smalldatetime).must_equal Time.utc(1901, 1, 1, 15, 45, 0, 0) + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::SmallDateTime + _(type.limit).must_be_nil + _(type.precision).must_be_nil + _(type.scale).must_be_nil + # Will remove fractional seconds and return again. + obj.smalldatetime = Time.utc(2078, 6, 5, 4, 20, 0, 3000) + _(obj.smalldatetime).must_equal Time.utc(2078, 6, 5, 4, 20, 0, 0), "Microseconds were <#{obj.smalldatetime.usec}> vs <0>" + obj.save! + _(obj.smalldatetime).must_equal Time.utc(2078, 6, 5, 4, 20, 0, 0), "Microseconds were <#{obj.reload.smalldatetime.usec}> vs <0>" + obj.reload + _(obj.smalldatetime).must_equal Time.utc(2078, 6, 5, 4, 20, 0, 0), "Microseconds were <#{obj.reload.smalldatetime.usec}> vs <0>" end - should 'be all it should be' do - assert_equal :integer, @tinyint.type - assert_nil @tinyint.scale - assert_equal 'tinyint(1)', @tinyint.sql_type + it "time(7)" do + col = column("time_7") + _(col.sql_type).must_equal "time(7)" + _(col.type).must_equal :time + _(col.null).must_equal true + _(col.default).must_equal Time.utc(1900, 1, 1, 4, 20, 0, Rational(288321500, 1000)), "Nanoseconds were <#{col.default.nsec}> vs <288321500>" + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::Time + _(type.limit).must_be_nil + _(type.precision).must_equal 7 + _(type.scale).must_be_nil + # Time's #usec precision (low micro) + obj.time_7 = Time.utc(2000, 1, 1, 15, 45, 0, 300) + _(obj.time_7).must_equal Time.utc(2000, 1, 1, 15, 45, 0, 300), "Microseconds were <#{obj.time_7.usec}> vs <0>" + _(obj.time_7).must_equal Time.utc(2000, 1, 1, 15, 45, 0, 300), "Nanoseconds were <#{obj.time_7.nsec}> vs <300>" + obj.save! + obj.reload + _(obj.time_7).must_equal Time.utc(2000, 1, 1, 15, 45, 0, 300), "Microseconds were <#{obj.time_7.usec}> vs <0>" + _(obj.time_7).must_equal Time.utc(2000, 1, 1, 15, 45, 0, 300), "Nanoseconds were <#{obj.time_7.nsec}> vs <300>" + # Time's #usec precision (high micro) + obj.time_7 = Time.utc(2000, 1, 1, 15, 45, 0, 234567) + _(obj.time_7).must_equal Time.utc(2000, 1, 1, 15, 45, 0, 234567), "Microseconds were <#{obj.time_7.usec}> vs <234567>" + obj.save! 
+ obj.reload + _(obj.time_7).must_equal Time.utc(2000, 1, 1, 15, 45, 0, 234567), "Microseconds were <#{obj.time_7.usec}> vs <234567>" + # Time's #usec precision (high nano rounded) + obj.time_7 = Time.utc(2000, 1, 1, 15, 45, 0, Rational(288321545, 1000)) + _(obj.time_7).must_equal Time.utc(2000, 1, 1, 15, 45, 0, Rational(288321500, 1000)), "Nanoseconds were <#{obj.time_7.nsec}> vs <288321500>" + obj.save! + obj.reload + _(obj.time_7).must_equal Time.utc(2000, 1, 1, 15, 45, 0, Rational(288321500, 1000)), "Nanoseconds were <#{obj.time_7.nsec}> vs <288321500>" end + it "time(2)" do + col = column("time_2") + _(col.sql_type).must_equal "time(2)" + _(col.type).must_equal :time + _(col.null).must_equal true + _(col.default).must_be_nil + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::Time + _(type.limit).must_be_nil + _(type.precision).must_equal 2 + _(type.scale).must_be_nil + # Always uses TinyTDS/Windows 2000-01-01 convention too. + obj.time_2 = Time.utc(2015, 1, 10, 15, 45, 0, 0) + _(obj.time_2).must_equal Time.utc(2000, 1, 1, 15, 45, 0, 0) + obj.save! + obj.reload + _(obj.time_2).must_equal Time.utc(2000, 1, 1, 15, 45, 0, 0) + # Time's #usec precision (barely in 2 precision equal to 0.03 seconds) + obj.time_2 = Time.utc(2000, 1, 1, 15, 45, 0, 30000) + _(obj.time_2).must_equal Time.utc(2000, 1, 1, 15, 45, 0, 30000), "Microseconds were <#{obj.time_2.usec}> vs <30000>" + obj.save! + obj.reload + _(obj.time_2).must_equal Time.utc(2000, 1, 1, 15, 45, 0, 30000), "Microseconds were <#{obj.time_2.usec}> vs <30000>" + # Time's #usec precision (below 2 precision) + obj.time_2 = Time.utc(2000, 1, 1, 15, 45, 0, 4000) + _(obj.time_2).must_equal Time.utc(2000, 1, 1, 15, 45, 0, 0), "Microseconds were <#{obj.time_2.usec}> vs <0>" + obj.save! + obj.reload + _(obj.time_2).must_equal Time.utc(2000, 1, 1, 15, 45, 0, 0), "Microseconds were <#{obj.time_2.usec}> vs <0>" + end + + it "time using default precision" do + col = column("time_default") + _(col.sql_type).must_equal "time(7)" + _(col.type).must_equal :time + _(col.null).must_equal true + _(col.default).must_equal Time.utc(1900, 1, 1, 15, 3, 42, Rational(62197800, 1000)), "Nanoseconds were <#{col.default.nsec}> vs <62197800>" + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::Time + _(type.limit).must_be_nil + _(type.precision).must_equal 7 + _(type.scale).must_be_nil + # Time's #usec precision (low micro) + obj.time_default = Time.utc(2000, 1, 1, 15, 45, 0, 300) + _(obj.time_default).must_equal Time.utc(2000, 1, 1, 15, 45, 0, 300), "Microseconds were <#{obj.time_default.usec}> vs <0>" + _(obj.time_default).must_equal Time.utc(2000, 1, 1, 15, 45, 0, 300), "Nanoseconds were <#{obj.time_default.nsec}> vs <300>" + obj.save! + obj.reload + _(obj.time_default).must_equal Time.utc(2000, 1, 1, 15, 45, 0, 300), "Microseconds were <#{obj.time_default.usec}> vs <0>" + _(obj.time_default).must_equal Time.utc(2000, 1, 1, 15, 45, 0, 300), "Nanoseconds were <#{obj.time_default.nsec}> vs <300>" + # Time's #usec precision (high micro) + obj.time_default = Time.utc(2000, 1, 1, 15, 45, 0, 234567) + _(obj.time_default).must_equal Time.utc(2000, 1, 1, 15, 45, 0, 234567), "Microseconds were <#{obj.time_default.usec}> vs <234567>" + obj.save! 
+ obj.reload + _(obj.time_default).must_equal Time.utc(2000, 1, 1, 15, 45, 0, 234567), "Microseconds were <#{obj.time_default.usec}> vs <234567>" + # Time's #usec precision (high nano rounded) + obj.time_default = Time.utc(2000, 1, 1, 15, 45, 0, Rational(288321545, 1000)) + _(obj.time_default).must_equal Time.utc(2000, 1, 1, 15, 45, 0, Rational(288321500, 1000)), "Nanoseconds were <#{obj.time_default.nsec}> vs <288321500>" + obj.save! + obj.reload + _(obj.time_default).must_equal Time.utc(2000, 1, 1, 15, 45, 0, Rational(288321500, 1000)), "Nanoseconds were <#{obj.time_default.nsec}> vs <288321500>" + end + + # Character Strings + + it "char(10)" do + col = column("char_10") + _(col.sql_type).must_equal "char(10)" + _(col.type).must_equal :char + _(col.null).must_equal true + _(col.default).must_equal "1234567890" + _(obj.char_10).must_equal "1234567890" + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::Char + _(type.limit).must_equal 10 + _(type.precision).must_be_nil + _(type.scale).must_be_nil + # Basic set and save. + obj.char_10 = "012345" + _(obj.char_10.strip).must_equal "012345" + obj.save! + _(obj.reload.char_10.strip).must_equal "012345" + end + + it "varchar(50)" do + col = column("varchar_50") + _(col.sql_type).must_equal "varchar(50)" + _(col.type).must_equal :varchar + _(col.null).must_equal true + _(col.default).must_equal "test varchar_50" + _(obj.varchar_50).must_equal "test varchar_50" + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::Varchar + _(type.limit).must_equal 50 + _(type.precision).must_be_nil + _(type.scale).must_be_nil + # Basic set and save. + assert_obj_set_and_save :varchar_50, "Hello World" + end + + it "varchar(max)" do + col = column("varchar_max") + _(col.sql_type).must_equal "varchar(max)" + _(col.type).must_equal :varchar_max + _(col.null).must_equal true + _(col.default).must_equal "test varchar_max" + _(obj.varchar_max).must_equal "test varchar_max" + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::VarcharMax + _(type.limit).must_equal 2_147_483_647 + _(type.precision).must_be_nil + _(type.scale).must_be_nil + # Basic set and save. + assert_obj_set_and_save :varchar_max, "Hello World" + end + + it "text" do + col = column("text") + _(col.sql_type).must_equal "text" + _(col.type).must_equal :text_basic + _(col.null).must_equal true + _(col.default).must_equal "test text" + _(obj.text).must_equal "test text" + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::Text + _(type.limit).must_equal 2_147_483_647 + _(type.precision).must_be_nil + _(type.scale).must_be_nil + # Basic set and save. 
+ assert_obj_set_and_save :text, "Hello World" + end + + # Unicode Character Strings + + it "nchar(10)" do + col = column("nchar_10") + _(col.sql_type).must_equal "nchar(10)" + _(col.type).must_equal :nchar + _(col.null).must_equal true + _(col.default).must_equal "12345678åå" + _(obj.nchar_10).must_equal "12345678åå" + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::UnicodeChar + _(type.limit).must_equal 10 + _(type.precision).must_be_nil + _(type.scale).must_be_nil + # Basic set and save. + obj.nchar_10 = "五六" + _(obj.nchar_10.strip).must_equal "五六" + obj.save! + _(obj.reload.nchar_10.strip).must_equal "五六" + end + + it "nvarchar(50)" do + col = column("nvarchar_50") + _(col.sql_type).must_equal "nvarchar(50)" + _(col.type).must_equal :string + _(col.null).must_equal true + _(col.default).must_equal "test nvarchar_50 åå" + _(obj.nvarchar_50).must_equal "test nvarchar_50 åå" + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::UnicodeVarchar + _(type.limit).must_equal 50 + _(type.precision).must_be_nil + _(type.scale).must_be_nil + # Basic set and save. + assert_obj_set_and_save :nvarchar_50, "一二34五六" + end + + it "nvarchar(max)" do + col = column("nvarchar_max") + _(col.sql_type).must_equal "nvarchar(max)" + _(col.type).must_equal :text + _(col.null).must_equal true + _(col.default).must_equal "test nvarchar_max åå" + _(obj.nvarchar_max).must_equal "test nvarchar_max åå" + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::UnicodeVarcharMax + _(type.limit).must_equal 2_147_483_647 + _(type.precision).must_be_nil + _(type.scale).must_be_nil + # Basic set and save. + assert_obj_set_and_save :nvarchar_max, "一二34五六" + end + + it "ntext" do + col = column("ntext") + _(col.sql_type).must_equal "ntext" + _(col.type).must_equal :ntext + _(col.null).must_equal true + _(col.default).must_equal "test ntext åå" + _(obj.ntext).must_equal "test ntext åå" + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::UnicodeText + _(type.limit).must_equal 2_147_483_647 + _(type.precision).must_be_nil + _(type.scale).must_be_nil + # Basic set and save. + assert_obj_set_and_save :ntext, "一二34五六" + end + + # Binary Strings + + let(:binary_file) { File.join ARTest::SQLServer.test_root_sqlserver, "fixtures", "1px.gif" } + let(:binary_data) { File.binread(binary_file) } + + it "binary(49)" do + col = column("binary_49") + _(col.sql_type).must_equal "binary(49)" + _(col.type).must_equal :binary_basic + _(col.null).must_equal true + _(col.default).must_be_nil + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::Binary + _(type.limit).must_equal 49 + _(type.precision).must_be_nil + _(type.scale).must_be_nil + # Basic set and save. + _(binary_data.encoding).must_equal Encoding::BINARY + _(binary_data.length).must_equal 49 + obj.binary_49 = binary_data + _(obj.binary_49).must_equal binary_data + obj.save! 
+ _(obj.reload.binary_49).must_equal binary_data + end + + it "varbinary(49)" do + col = column("varbinary_49") + _(col.sql_type).must_equal "varbinary(49)" + _(col.type).must_equal :varbinary + _(col.null).must_equal true + _(col.default).must_be_nil + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::Varbinary + _(type.limit).must_equal 49 + _(type.precision).must_be_nil + _(type.scale).must_be_nil + # Basic set and save. + binary_data20 = binary_data.to(20) + _(binary_data20.encoding).must_equal Encoding::BINARY + obj.varbinary_49 = binary_data20 + _(obj.varbinary_49).must_equal binary_data20 + obj.save! + _(obj.reload.varbinary_49).must_equal binary_data20 + end + + it "varbinary(max)" do + col = column("varbinary_max") + _(col.sql_type).must_equal "varbinary(max)" + _(col.type).must_equal :binary + _(col.null).must_equal true + _(col.default).must_be_nil + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::VarbinaryMax + _(type.limit).must_equal 2_147_483_647 + _(type.precision).must_be_nil + _(type.scale).must_be_nil + # Basic set and save. + _(binary_data.encoding).must_equal Encoding::BINARY + assert_obj_set_and_save :varbinary_max, binary_data + end + + # Other Data Types + + it "uniqueidentifier" do + col = column("uniqueidentifier") + _(col.sql_type).must_equal "uniqueidentifier" + _(col.type).must_equal :uuid + _(col.null).must_equal true + _(col.default).must_be_nil + _(col.default_function).must_equal "newid()" + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::Uuid + _(type.limit).must_be_nil + _(type.precision).must_be_nil + _(type.scale).must_be_nil + # Basic set and save. + obj.uniqueidentifier = "this will not qualify as valid" + _(obj.uniqueidentifier).must_be_nil + obj.save! + obj.reload + _(obj.uniqueidentifier).must_match ActiveRecord::ConnectionAdapters::SQLServer::Type::Uuid::ACCEPTABLE_UUID + obj.uniqueidentifier = "6F9619FF-8B86-D011-B42D-00C04FC964FF" + _(obj.uniqueidentifier).must_equal "6F9619FF-8B86-D011-B42D-00C04FC964FF" + obj.save! + obj.reload + _(obj.uniqueidentifier).must_equal "6F9619FF-8B86-D011-B42D-00C04FC964FF" + end + + it "timestamp" do + col = column("timestamp") + _(col.sql_type).must_equal "timestamp" + _(col.type).must_equal :ss_timestamp + _(col.null).must_equal true + _(col.default).must_be_nil + _(col.default_function).must_be_nil + type = col.fetch_cast_type(connection) + _(type).must_be_instance_of ActiveRecord::ConnectionAdapters::SQLServer::Type::Timestamp + _(type.limit).must_be_nil + _(type.precision).must_be_nil + _(type.scale).must_be_nil + # Basic read. + _(obj.timestamp).must_be_nil + obj.save! + obj.reload + _(obj.timestamp).must_match %r{\000} + obj.timestamp + # Can set another attribute + obj.uniqueidentifier = "6F9619FF-8B86-D011-B42D-00C04FC964FF" + obj.save! + end + + it "does not mark object as changed after save" do + obj.save! 
+ obj.attributes + _(obj.changed?).must_equal false + end end - - end diff --git a/test/cases/connection_test_sqlserver.rb b/test/cases/connection_test_sqlserver.rb index c5cff71ac..da3e8e8b5 100644 --- a/test/cases/connection_test_sqlserver.rb +++ b/test/cases/connection_test_sqlserver.rb @@ -1,305 +1,69 @@ -require 'cases/sqlserver_helper' -require 'models/reply' +# frozen_string_literal: true + +require "cases/helper_sqlserver" +require "models/reply" +require "models/topic" + +class ConnectionTestSQLServer < ActiveRecord::TestCase + self.use_transactional_tests = false -class ConnectionTestSqlserver < ActiveRecord::TestCase - - self.use_transactional_fixtures = false - fixtures :topics, :accounts - - def setup - @connection = ActiveRecord::Base.connection + + before do + connection.reconnect! + assert connection.active? end - - should 'affect rows' do - topic_data = { 1 => { "content" => "1 updated" }, 2 => { "content" => "2 updated" } } + + it "affect rows" do + topic_data = {1 => {"content" => "1 updated"}, 2 => {"content" => "2 updated"}} updated = Topic.update(topic_data.keys, topic_data.values) assert_equal 2, updated.size assert_equal "1 updated", Topic.find(1).content assert_equal "2 updated", Topic.find(2).content - assert_equal 2, Topic.delete([1, 2]) + assert_equal 2, Topic.delete([1, 2]) end - - should 'allow usage of :database connection option to remove setting from dsn' do - assert_equal 'activerecord_unittest', @connection.current_database - begin - @connection.use_database('activerecord_unittest2') - assert_equal 'activerecord_unittest2', @connection.current_database - ensure - @connection.use_database - assert_equal 'activerecord_unittest', @connection.current_database, 'Would default back to connection options' - end - end unless sqlserver_azure? - - context 'ODBC connection management' do - - should 'return finished ODBC statement handle from #execute without block' do - assert_all_odbc_statements_used_are_closed do - @connection.execute('SELECT * FROM [topics]') - end - end - - should 'finish ODBC statement handle from #execute with block' do - assert_all_odbc_statements_used_are_closed do - @connection.execute('SELECT * FROM [topics]') { } - end - end - - should 'finish connection from #raw_select' do - assert_all_odbc_statements_used_are_closed do - @connection.send(:raw_select,'SELECT * FROM [topics]') - end - end - should 'execute without block closes statement' do - assert_all_odbc_statements_used_are_closed do - @connection.execute("SELECT 1") - end - end - - should 'execute with block closes statement' do - assert_all_odbc_statements_used_are_closed do - @connection.execute("SELECT 1") do |sth| - assert !sth.finished?, "Statement should still be alive within block" - end - end - end - - should 'insert with identity closes statement' do - assert_all_odbc_statements_used_are_closed do - @connection.exec_insert "INSERT INTO accounts ([id],[firm_id],[credit_limit]) VALUES (999, 1, 50)", "SQL", [] + unless connection_sqlserver_azure? 
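+  # Azure SQL Database does not support switching databases with USE, so this block is skipped there.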
+ it "allow usage of :database connection option to remove setting from dsn" do + assert_equal "activerecord_unittest", connection.current_database + begin + connection.use_database("activerecord_unittest2") + assert_equal "activerecord_unittest2", connection.current_database + ensure + connection.use_database + assert_equal "activerecord_unittest", connection.current_database, "Would default back to connection options" end end + end - should 'insert without identity closes statement' do - assert_all_odbc_statements_used_are_closed do - @connection.exec_insert "INSERT INTO accounts ([firm_id],[credit_limit]) VALUES (1, 50)", "SQL", [] - end + describe "Connection management" do + it "set spid on connect" do + _(["Fixnum", "Integer"]).must_include connection.spid.class.name end - should 'active closes statement' do - assert_all_odbc_statements_used_are_closed do - @connection.active? - end + it "reset spid on disconnect!" do + connection.disconnect! + assert connection.spid.nil? end - end if connection_mode_odbc? - - - context 'Connection management' do - - setup do - assert @connection.active? - end - - should 'set spid on connect' do - assert_instance_of Fixnum, @connection.spid - end - - should 'reset spid on disconnect!' do - @connection.disconnect! - assert @connection.spid.nil? - end - - should 'be able to disconnect and reconnect at will' do - @connection.disconnect! - assert !@connection.active? - @connection.reconnect! - assert @connection.active? - end - - should 'auto reconnect when setting is on' do - with_auto_connect(true) do - @connection.disconnect! - assert_nothing_raised() { Topic.count } - assert @connection.active? - end + it "reset raw connection on disconnect!" do + connection.disconnect! + _(connection.instance_variable_get(:@raw_connection)).must_be_nil end - - should 'not auto reconnect when setting is off' do - with_auto_connect(false) do - @connection.disconnect! - assert_raise(ActiveRecord::LostConnection) { Topic.count } - end - end - - should 'not auto reconnect on commit transaction' do - @connection.disconnect! - assert_raise(ActiveRecord::LostConnection) { @connection.commit_db_transaction } - end - - should 'gracefully ignore lost connections on rollback transaction' do - @connection.disconnect! - assert_nothing_raised { @connection.rollback_db_transaction } - end - - should 'not auto reconnect on create savepoint' do - @connection.disconnect! - assert_raise(ActiveRecord::LostConnection) { @connection.create_savepoint } - end - - should 'not auto reconnect on rollback to savepoint ' do - @connection.disconnect! - assert_raise(ActiveRecord::LostConnection) { @connection.rollback_to_savepoint } - end - - context 'testing #disable_auto_reconnect' do - - should 'when auto reconnect setting is on' do - with_auto_connect(true) do - @connection.send(:disable_auto_reconnect) do - assert !@connection.class.auto_connect - end - assert @connection.class.auto_connect - end - end - - should 'when auto reconnect setting is off' do - with_auto_connect(false) do - @connection.send(:disable_auto_reconnect) do - assert !@connection.class.auto_connect - end - assert !@connection.class.auto_connect - end - end - - end - - context 'with a deadlock victim exception (1205)' do - - context 'outside a transaction' do - - setup do - @query = "SELECT 1 as [one]" - @expected = @connection.execute(@query) - # Execute the query to get a handle of the expected result, which - # will be returned after a simulated deadlock victim (1205). 
- raw_conn = @connection.instance_variable_get(:@connection) - stubbed_handle = raw_conn.execute(@query) - @connection.send(:finish_statement_handle, stubbed_handle) - raw_conn.stubs(:execute).raises(deadlock_victim_exception(@query)).then.returns(stubbed_handle) - end - - teardown do - @connection.class.retry_deadlock_victim = nil - end - - should 'retry by default' do - assert_nothing_raised do - assert_equal @expected, @connection.execute(@query) - end - end - - should 'raise ActiveRecord::DeadlockVictim if retry is disabled' do - @connection.class.retry_deadlock_victim = false - assert_raise(ActiveRecord::DeadlockVictim) do - assert_equal @expected, @connection.execute(@query) - end - end - end - - context 'within a transaction' do - - setup do - @query = "SELECT 1 as [one]" - @expected = @connection.execute(@query) - # We "stub" the execute method to simulate raising a deadlock victim exception once. - @connection.class.class_eval do - def execute_with_deadlock_exception(sql, *args) - if !@raised_deadlock_exception && sql == "SELECT 1 as [one]" - sql = "RAISERROR('Transaction (Process ID #{Process.pid}) was deadlocked on lock resources with another process and has been chosen as the deadlock victim. Rerun the transaction.: #{sql}', 13, 1)" - @raised_deadlock_exception = true - elsif @raised_deadlock_exception == true && sql =~ /RAISERROR\('Transaction \(Process ID \d+\) was deadlocked on lock resources with another process and has been chosen as the deadlock victim\. Rerun the transaction\.: SELECT 1 as \[one\]', 13, 1\)/ - sql = "SELECT 1 as [one]" - end - execute_without_deadlock_exception(sql, *args) - end - alias :execute_without_deadlock_exception :execute - alias :execute :execute_with_deadlock_exception - end - end - - teardown do - # Cleanup the "stubbed" execute method. - @connection.class.class_eval do - alias :execute :execute_without_deadlock_exception - remove_method :execute_with_deadlock_exception - remove_method :execute_without_deadlock_exception - end - @connection.send(:remove_instance_variable, :@raised_deadlock_exception) - @connection.class.retry_deadlock_victim = nil - end - - should 'retry by default' do - assert_nothing_raised do - ActiveRecord::Base.transaction do - assert_equal @expected, @connection.execute(@query) - end - end - end - - should 'raise ActiveRecord::DeadlockVictim if retry disabled' do - @connection.class.retry_deadlock_victim = false - assert_raise(ActiveRecord::DeadlockVictim) do - ActiveRecord::Base.transaction do - assert_equal @expected, @connection.execute(@query) - end - end - end - - end - - end if connection_mode_dblib? # Since it is easier to test, but feature should work in ODBC too. - - end - - context 'Diagnostics' do - - should 'testing #activity_stats' do - stats = @connection.activity_stats - assert stats.length > 0 - assert stats.all? { |s| s.has_key?("session_id") } - assert stats.all? { |s| s["database"] == @connection.current_database } + it "be able to disconnect and reconnect at will" do + disconnect_raw_connection! + assert !connection.active? + connection.reconnect! + assert connection.active? 
end - end - - - + private - - def assert_all_odbc_statements_used_are_closed(&block) - odbc = @connection.raw_connection.class.parent - existing_handles = [] - ObjectSpace.each_object(odbc::Statement) { |h| existing_handles << h } - existing_handle_ids = existing_handles.map(&:object_id) - assert existing_handles.all?(&:finished?), "Somewhere before the block some statements were not closed" - GC.disable - yield - used_handles = [] - ObjectSpace.each_object(odbc::Statement) { |h| used_handles << h unless existing_handle_ids.include?(h.object_id) } - assert used_handles.size > 0, "No statements were used within given block" - assert used_handles.all?(&:finished?), "Statement should have been closed within given block" - ensure - GC.enable - end - - def deadlock_victim_exception(sql) - require 'tiny_tds/error' - error = TinyTds::Error.new("Transaction (Process ID #{Process.pid}) was deadlocked on lock resources with another process and has been chosen as the deadlock victim. Rerun the transaction.: #{sql}") - error.severity = 13 - error.db_error_number = 1205 - error - end - - def with_auto_connect(boolean) - existing = ActiveRecord::ConnectionAdapters::SQLServerAdapter.auto_connect - ActiveRecord::ConnectionAdapters::SQLServerAdapter.auto_connect = boolean - yield - ensure - ActiveRecord::ConnectionAdapters::SQLServerAdapter.auto_connect = existing + def disconnect_raw_connection! + connection.raw_connection.close + rescue + nil end - end diff --git a/test/cases/dbconsole.rb b/test/cases/dbconsole.rb new file mode 100644 index 000000000..c23e53f0e --- /dev/null +++ b/test/cases/dbconsole.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +class DbConsole < ActiveRecord::TestCase + subject { ActiveRecord::ConnectionAdapters::SQLServerAdapter } + + it "uses sqlcmd to connect to database" do + subject.expects(:find_cmd_and_exec).with("sqlcmd", "-d", "db", "-U", "user", "-P", "secret", "-S", "tcp:localhost,1433") + + config = make_db_config(adapter: "sqlserver", database: "db", username: "user", password: "secret", host: "localhost", port: 1433) + + subject.dbconsole(config) + end + + private + + def make_db_config(config) + ActiveRecord::DatabaseConfigurations::HashConfig.new("test", "primary", config) + end +end diff --git a/test/cases/disconnected_test_sqlserver.rb b/test/cases/disconnected_test_sqlserver.rb new file mode 100644 index 000000000..d3e89fbff --- /dev/null +++ b/test/cases/disconnected_test_sqlserver.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" + +class TestDisconnectedAdapter < ActiveRecord::TestCase + self.use_transactional_tests = false + + undef_method :setup + def setup + @connection = ActiveRecord::Base.lease_connection + end + + teardown do + return if in_memory_db? + db_config = ActiveRecord::Base.connection_db_config + ActiveRecord::Base.establish_connection(db_config) + end + + test "execute procedure after disconnect reconnects" do + @connection.execute_procedure :sp_tables, "sst_datatypes" + @connection.disconnect! + + assert_nothing_raised do + @connection.execute_procedure :sp_tables, "sst_datatypes" + end + end + + test "execute query after disconnect reconnects" do + sql = "SELECT count(*) from products WHERE id IN(@0, @1)" + binds = [ + ActiveRecord::Relation::QueryAttribute.new("id", 2, ActiveRecord::Type::BigInteger.new), + ActiveRecord::Relation::QueryAttribute.new("id", 2, ActiveRecord::Type::BigInteger.new) + ] + + @connection.exec_query sql, "TEST", binds + @connection.disconnect!
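+ # The adapter is expected to reconnect transparently when the next query is executed.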
+ + assert_nothing_raised do + @connection.exec_query sql, "TEST", binds + end + end +end diff --git a/test/cases/eager_load_too_many_ids_test_sqlserver.rb b/test/cases/eager_load_too_many_ids_test_sqlserver.rb new file mode 100644 index 000000000..72ba24625 --- /dev/null +++ b/test/cases/eager_load_too_many_ids_test_sqlserver.rb @@ -0,0 +1,18 @@ +require "cases/helper_sqlserver" +require "models/citation" +require "models/book" + +class EagerLoadingTooManyIdsTest < ActiveRecord::TestCase + fixtures :citations + + def test_batch_preloading_too_many_ids + in_clause_length = 10_000 + + # We Monkey patch Preloader to work with batches of 10_000 records. + # Expect: N Books queries + Citation query + expected_query_count = (Citation.count / in_clause_length.to_f).ceil + 1 + assert_queries_count(expected_query_count) do + Citation.preload(:reference_of).to_a.size + end + end +end diff --git a/test/cases/eager_test_sqlserver.rb b/test/cases/eager_test_sqlserver.rb deleted file mode 100644 index c80988998..000000000 --- a/test/cases/eager_test_sqlserver.rb +++ /dev/null @@ -1,22 +0,0 @@ -require 'cases/sqlserver_helper' -require 'models/post' -require 'models/comment' -require 'models/author' - -class EagerAssociationTestSqlserver < ActiveRecord::TestCase -end - -class EagerAssociationTest < ActiveRecord::TestCase - - COERCED_TESTS = [:test_count_with_include] - - include SqlserverCoercedTest - - fixtures :posts, :comments, :authors - - def test_coerced_count_with_include - assert_equal 3, authors(:david).posts_with_comments.count(:conditions => "len(comments.body) > 15") - end - - -end diff --git a/test/cases/enum_test_sqlserver.rb b/test/cases/enum_test_sqlserver.rb new file mode 100644 index 000000000..8089e7068 --- /dev/null +++ b/test/cases/enum_test_sqlserver.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" + +class EnumTestSQLServer < ActiveRecord::TestCase + # Check that enums are supported for all string types. + # For each type we check: cast, serialize, and update by declaration. + # We create a custom class for each type to test. + %w[char_10 varchar_50 varchar_max text nchar_10 nvarchar_50 nvarchar_max ntext].each do |col_name| + describe "support #{col_name} enums" do + let(:klass) do + Class.new(ActiveRecord::Base) do + self.table_name = "sst_datatypes" + + enum col_name, {alpha: "A", beta: "B"} + end + end + + it "type.cast" do + type = klass.type_for_attribute(col_name) + + assert_equal "alpha", type.cast("A") + assert_equal "beta", type.cast("B") + end + + it "type.serialize" do + type = klass.type_for_attribute(col_name) + + assert_equal "A", type.serialize("A") + assert_equal "B", type.serialize("B") + + assert_equal "A", type.serialize(:alpha) + assert_equal "B", type.serialize(:beta) + end + + it "update by declaration" do + r = klass.new + + r.alpha! + assert_predicate r, :alpha? + + r.beta! + assert_not_predicate r, :alpha? + assert_predicate r, :beta? 
+ end + end + end +end diff --git a/test/cases/execute_procedure_test_sqlserver.rb b/test/cases/execute_procedure_test_sqlserver.rb index 46525c027..a47c8aa15 100644 --- a/test/cases/execute_procedure_test_sqlserver.rb +++ b/test/cases/execute_procedure_test_sqlserver.rb @@ -1,43 +1,57 @@ -require 'cases/sqlserver_helper' +# frozen_string_literal: true -class ExecuteProcedureTestSqlserver < ActiveRecord::TestCase - - def setup - @klass = ActiveRecord::Base - end - - should 'execute a simple procedure' do - tables = @klass.execute_procedure :sp_tables +require "cases/helper_sqlserver" + +class ExecuteProcedureTestSQLServer < ActiveRecord::TestCase + it "execute a simple procedure" do + tables = ActiveRecord::Base.execute_procedure :sp_tables assert_instance_of Array, tables assert tables.first.respond_to?(:keys) end - - should 'take parameter arguments' do - tables = @klass.execute_procedure :sp_tables, 'sql_server_chronics' + + it "take parameter arguments" do + tables = ActiveRecord::Base.execute_procedure :sp_tables, "sst_datatypes" table_info = tables.first assert_equal 1, tables.size - assert_equal (ENV['ARUNIT_DB_NAME'] || 'activerecord_unittest'), table_info['TABLE_QUALIFIER'], "Table Info: #{table_info.inspect}" - assert_equal 'TABLE', table_info['TABLE_TYPE'], "Table Info: #{table_info.inspect}" + assert_equal (ENV["ARUNIT_DB_NAME"] || "activerecord_unittest"), table_info["TABLE_QUALIFIER"], "Table Info: #{table_info.inspect}" + assert_equal "TABLE", table_info["TABLE_TYPE"], "Table Info: #{table_info.inspect}" end - - should 'allow multiple result sets to be returned' do - results1, results2 = @klass.execute_procedure('sp_helpconstraint','accounts') + + it "allow multiple result sets to be returned" do + results1, results2 = ActiveRecord::Base.execute_procedure("sp_helpconstraint", "accounts") assert_instance_of Array, results1 assert results1.first.respond_to?(:keys) - assert results1.first['Object Name'] + assert results1.first["Object Name"] assert_instance_of Array, results2 assert results2.first.respond_to?(:keys) - assert results2.first['constraint_name'] - assert results2.first['constraint_type'] + assert results2.first["constraint_name"] + assert results2.first["constraint_type"] end - should 'take named parameter arguments' do - tables = @klass.execute_procedure :sp_tables, :table_name => 'tables', :table_owner => 'sys' + it "take named parameter arguments" do + tables = ActiveRecord::Base.execute_procedure :sp_tables, table_name: "tables", table_owner: "sys" table_info = tables.first assert_equal 1, tables.size - assert_equal (ENV['ARUNIT_DB_NAME'] || 'activerecord_unittest'), table_info['TABLE_QUALIFIER'], "Table Info: #{table_info.inspect}" - assert_equal 'VIEW', table_info['TABLE_TYPE'], "Table Info: #{table_info.inspect}" + assert_equal (ENV["ARUNIT_DB_NAME"] || "activerecord_unittest"), table_info["TABLE_QUALIFIER"], "Table Info: #{table_info.inspect}" + assert_equal "VIEW", table_info["TABLE_TYPE"], "Table Info: #{table_info.inspect}" + end + + it "uses the proper timezone" do + date_proc = connection.execute_procedure("my_getutcdate").first["utcdate"] + date_base = connection.select_value("select GETUTCDATE()") + assert_equal date_base.change(usec: 0), date_proc.change(usec: 0) + end + + def transaction_with_procedure_and_return + ActiveRecord::Base.transaction do + connection.execute_procedure("my_getutcdate") + return + end + end + + it "test deprecation with transaction return when executing procedure" do + assert_not_deprecated(ActiveRecord.deprecator) do + 
transaction_with_procedure_and_return + end end - - end diff --git a/test/cases/fetch_test_sqlserver.rb b/test/cases/fetch_test_sqlserver.rb new file mode 100755 index 000000000..3a4dcc3ee --- /dev/null +++ b/test/cases/fetch_test_sqlserver.rb @@ -0,0 +1,88 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" +require "models/book" + +class FetchTestSqlserver < ActiveRecord::TestCase + let(:books) { @books } + + before { create_10_books } + + it "work with fully qualified table and columns in select" do + books = Book.select("books.id, books.name").limit(3).offset(5) + assert_equal Book.all[5, 3].map(&:id), books.map(&:id) + end + + describe "count" do + it "gauntlet" do + books[0].destroy + books[1].destroy + books[2].destroy + assert_equal 7, Book.count + assert_equal 1, Book.limit(1).offset(1).count + assert_equal 1, Book.limit(1).offset(5).count + assert_equal 1, Book.limit(1).offset(6).count + assert_equal 0, Book.limit(1).offset(7).count + assert_equal 3, Book.limit(3).offset(4).count + assert_equal 2, Book.limit(3).offset(5).count + assert_equal 1, Book.limit(3).offset(6).count + assert_equal 0, Book.limit(3).offset(7).count + assert_equal 0, Book.limit(3).offset(8).count + end + end + + describe "order" do + it "gauntlet" do + Book.where(name: "Name-10").delete_all + _(Book.order(:name).limit(1).offset(1).map(&:name)).must_equal ["Name-2"] + _(Book.order(:name).limit(2).offset(2).map(&:name)).must_equal ["Name-3", "Name-4"] + _(Book.order(:name).limit(2).offset(7).map(&:name)).must_equal ["Name-8", "Name-9"] + _(Book.order(:name).limit(3).offset(7).map(&:name)).must_equal ["Name-8", "Name-9"] + _(Book.order(:name).limit(3).offset(9).map(&:name)).must_equal [] + end + end + + describe "FROM subquery" do + let(:from_sql) { "(SELECT [books].* FROM [books]) [books]" } + + it "SQL generated correctly for FROM subquery if order provided" do + query = Book.from(from_sql).order(:id).limit(5) + + assert_equal query.to_sql, "SELECT [books].* FROM (SELECT [books].* FROM [books]) [books] ORDER BY [books].[id] ASC OFFSET 0 ROWS FETCH NEXT 5 ROWS ONLY" + assert_equal query.to_a.size, 5 + end + + it "exception thrown if FROM subquery is provided without an order" do + query = Book.from(from_sql).limit(5) + + assert_raise(ActiveRecord::StatementInvalid) do + query.to_sql + end + end + end + + protected + + def create_10_books + Book.delete_all + @books = (1..10).map { |i| Book.create! 
name: "Name-#{i}" } + end +end + +class DeterministicFetchWithCompositePkTestSQLServer < ActiveRecord::TestCase + it "orders by the identity column if table has one" do + SSCompositePkWithIdentity.delete_all + SSCompositePkWithIdentity.create(pk_col_two: 2) + SSCompositePkWithIdentity.create(pk_col_two: 1) + + _(SSCompositePkWithIdentity.take(1).map(&:pk_col_two)).must_equal [2] + end + + it "orders by the first column if table has no identity column" do + SSCompositePkWithoutIdentity.delete_all + SSCompositePkWithoutIdentity.create(pk_col_one: 2, pk_col_two: 2) + SSCompositePkWithoutIdentity.create(pk_col_one: 1, pk_col_two: 1) + + _(SSCompositePkWithoutIdentity.take(1).map(&:pk_col_two)).must_equal [1] + end +end diff --git a/test/cases/finder_test_sqlserver.rb b/test/cases/finder_test_sqlserver.rb deleted file mode 100644 index aca251ad3..000000000 --- a/test/cases/finder_test_sqlserver.rb +++ /dev/null @@ -1,34 +0,0 @@ -require 'cases/sqlserver_helper' -require 'models/event' - -class FinderTestSqlserver < ActiveRecord::TestCase -end - -class FinderTest < ActiveRecord::TestCase - - COERCED_TESTS = [ - :test_exists_does_not_select_columns_without_alias, - :test_string_sanitation, - :test_first_and_last_with_integer_should_use_sql_limit - ] - - include SqlserverCoercedTest - - def test_coerced_exists_does_not_select_columns_without_alias - assert_sql(/SELECT TOP \(1\) 1 AS one FROM \[topics\]/i) do - Topic.exists? - end - end - - def test_coerced_string_sanitation - assert_not_equal "N'something ' 1=1'", ActiveRecord::Base.sanitize("something ' 1=1") - assert_equal "N'something; select table'", ActiveRecord::Base.sanitize("something; select table") - end - - def test_coerced_first_and_last_with_integer_should_use_sql_limit - assert_sql(/TOP \(2\)/) { Topic.first(2).entries } - assert_sql(/TOP \(5\)/) { Topic.last(5).entries } - end - -end - diff --git a/test/cases/fully_qualified_identifier_test_sqlserver.rb b/test/cases/fully_qualified_identifier_test_sqlserver.rb new file mode 100644 index 000000000..73c173b71 --- /dev/null +++ b/test/cases/fully_qualified_identifier_test_sqlserver.rb @@ -0,0 +1,72 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" + +class FullyQualifiedIdentifierTestSQLServer < ActiveRecord::TestCase + describe "local server" do + it "should use table name in select projections" do + table = Arel::Table.new(:table) + expected_sql = "SELECT [table].[name] FROM [table]" + assert_equal expected_sql, table.project(table[:name]).to_sql + end + end + + describe "remote server" do + before do + connection_options[:database_prefix] = "[my.server].db.schema." 
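+ # With this prefix set, generated SQL should reference the fully qualified [my.server].[db].[schema].[table] name.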
+ end + + after do + connection_options.delete :database_prefix + end + + it "should use fully qualified table name in select from clause" do + table = Arel::Table.new(:table) + expected_sql = "SELECT * FROM [my.server].[db].[schema].[table]" + assert_equal expected_sql, table.project(Arel.star).to_sql + end + + it "should not use fully qualified table name in select projections" do + table = Arel::Table.new(:table) + expected_sql = "SELECT [table].[name] FROM [my.server].[db].[schema].[table]" + assert_equal expected_sql, table.project(table[:name]).to_sql + end + + it "should not use fully qualified table name in where clause" do + table = Arel::Table.new(:table) + expected_sql = "SELECT * FROM [my.server].[db].[schema].[table] WHERE [table].[id] = 42" + quietly { assert_equal expected_sql, table.project(Arel.star).where(table[:id].eq(42)).to_sql } + end + + it "should not use fully qualified table name in order clause" do + table = Arel::Table.new(:table) + expected_sql = "SELECT * FROM [my.server].[db].[schema].[table] ORDER BY [table].[name]" + assert_equal expected_sql, table.project(Arel.star).order(table[:name]).to_sql + end + + it "should use fully qualified table name in insert statement" do + manager = Arel::InsertManager.new + manager.into Arel::Table.new(:table) + manager.values = manager.create_values [Arel.sql("*")] + expected_sql = "INSERT INTO [my.server].[db].[schema].[table] VALUES (*)" + quietly { assert_equal expected_sql, manager.to_sql } + end + + it "should use fully qualified table name in update statement" do + table = Arel::Table.new(:table) + manager = Arel::UpdateManager.new + manager.table(table).where(table[:id].eq(42)) + manager.set([[table[:name], "Bob"]]) + expected_sql = "UPDATE [my.server].[db].[schema].[table] SET [name] = N'Bob' WHERE [table].[id] = 42" + quietly { assert_equal expected_sql, manager.to_sql } + end + + it "should use fully qualified table name in delete statement" do + table = Arel::Table.new(:table) + manager = Arel::DeleteManager.new + manager.from(table).where(table[:id].eq(42)) + expected_sql = "DELETE FROM [my.server].[db].[schema].[table] WHERE [table].[id] = 42" + quietly { assert_equal expected_sql, manager.to_sql } + end + end +end diff --git a/test/cases/has_and_belongs_to_many_associations_test_sqlserver.rb b/test/cases/has_and_belongs_to_many_associations_test_sqlserver.rb deleted file mode 100644 index 568e22e30..000000000 --- a/test/cases/has_and_belongs_to_many_associations_test_sqlserver.rb +++ /dev/null @@ -1,24 +0,0 @@ -require 'cases/sqlserver_helper' - -class HasAndBelongsToManyAssociationsTestSqlserver < ActiveRecord::TestCase -end - -class HasAndBelongsToManyAssociationsTest < ActiveRecord::TestCase - - COERCED_TESTS = [ - :test_count_with_finder_sql, - :test_caching_of_columns - ] - - include SqlserverCoercedTest - - def test_coerced_count_with_finder_sql - assert true - end - - def test_coerced_caching_of_columns - assert true - end - - -end diff --git a/test/cases/helper_sqlserver.rb b/test/cases/helper_sqlserver.rb new file mode 100644 index 000000000..64c433cf3 --- /dev/null +++ b/test/cases/helper_sqlserver.rb @@ -0,0 +1,73 @@ +# frozen_string_literal: true + +require "support/paths_sqlserver" +require "bundler/setup" +Bundler.require :default, :development +require "pry" +require "support/core_ext/query_cache" +require "support/core_ext/backtrace_cleaner" +require "support/minitest_sqlserver" +require "support/test_in_memory_oltp" +require "support/table_definition_sqlserver" +require "cases/helper" +require 
"support/load_schema_sqlserver" +require "support/coerceable_test_sqlserver" +require "support/connection_reflection" +require "support/query_assertions" +require "mocha/minitest" + +Minitest.after_run do + puts "\n\n" + puts "=" * 80 + puts "Ruby Version: #{RUBY_VERSION}p#{RUBY_PATCHLEVEL} (#{RUBY_RELEASE_DATE}) [#{RUBY_PLATFORM}]" + puts "\n\n" + puts "=" * 80 + puts ActiveRecord::Base.lease_connection.send(:sqlserver_version) + puts "\nSQL Server Version Year: #{ActiveRecord::Base.lease_connection.get_database_version}" + puts "=" * 80 +end + +module ActiveSupport + class TestCase < ::Minitest::Test + include ARTest::SQLServer::CoerceableTest + end +end + +module ActiveRecord + class TestCase < ActiveSupport::TestCase + SQLServer = ActiveRecord::ConnectionAdapters::SQLServer + + include ARTest::SQLServer::QueryAssertions + include ActiveSupport::Testing::Stream + include ARTest::SQLServer::ConnectionReflection + + let(:logger) { ActiveRecord::Base.logger } + + setup :ensure_clean_rails_env + setup :remove_backtrace_silencers + + private + + def ensure_clean_rails_env + Rails.instance_variable_set(:@_env, nil) if defined?(::Rails) + end + + def remove_backtrace_silencers + Rails.backtrace_cleaner.remove_silencers! + end + + def host_windows? + RbConfig::CONFIG["host_os"] =~ /mswin|mingw/ + end + + def with_use_output_inserted_disabled + klass = ActiveRecord::ConnectionAdapters::SQLServerAdapter + klass.use_output_inserted = false + yield + ensure + klass.use_output_inserted = true + end + end +end + +Dir["#{ARTest::SQLServer.test_root_sqlserver}/models/**/*.rb"].each { |f| require f } diff --git a/test/cases/in_clause_test_sqlserver.rb b/test/cases/in_clause_test_sqlserver.rb new file mode 100644 index 000000000..539d57ae9 --- /dev/null +++ b/test/cases/in_clause_test_sqlserver.rb @@ -0,0 +1,63 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" +require "models/post" +require "models/author" + +class InClauseTestSQLServer < ActiveRecord::TestCase + fixtures :posts, :authors + + it "removes ordering from subqueries" do + authors_subquery = Author.where(name: ["David", "Mary", "Bob"]).order(:name) + posts = Post.where(author: authors_subquery) + + assert_includes authors_subquery.to_sql, "ORDER BY [authors].[name]" + assert_not_includes posts.to_sql, "ORDER BY [authors].[name]" + assert_equal 10, posts.length + end + + it "does not remove ordering from subquery that includes a limit" do + authors_subquery = Author.where(name: ["David", "Mary", "Bob"]).order(:name).limit(2) + posts = Post.where(author: authors_subquery) + + assert_includes authors_subquery.to_sql, "ORDER BY [authors].[name]" + assert_includes posts.to_sql, "ORDER BY [authors].[name]" + assert_equal 7, posts.length + end + + it "does not remove ordering from subquery that includes an offset" do + authors_subquery = Author.where(name: ["David", "Mary", "Bob"]).order(:name).offset(1) + posts = Post.where(author: authors_subquery) + + assert_includes authors_subquery.to_sql, "ORDER BY [authors].[name]" + assert_includes posts.to_sql, "ORDER BY [authors].[name]" + assert_equal 8, posts.length + end + + it "removes ordering from 'not' subqueries" do + authors_subquery = Author.where.not(name: ["Mary", "Bob"]).order(:name) + posts = Post.where(author: authors_subquery) + + assert_includes authors_subquery.to_sql, "ORDER BY [authors].[name]" + assert_not_includes posts.to_sql, "ORDER BY [authors].[name]" + assert_equal 5, posts.length + end + + it "does not remove ordering from 'not' subquery that includes a 
limit" do + authors_subquery = Author.where.not(name: ["Ronan", "Mary", "Bob"]).order(:name).limit(2) + posts = Post.where(author: authors_subquery) + + assert_includes authors_subquery.to_sql, "ORDER BY [authors].[name]" + assert_includes posts.to_sql, "ORDER BY [authors].[name]" + assert_equal 5, posts.length + end + + it "does not remove ordering from 'not' subquery that includes an offset" do + authors_subquery = Author.where.not(name: ["David", "Ronan", "Cian"]).order(:name).offset(1) + posts = Post.where(author: authors_subquery) + + assert_includes authors_subquery.to_sql, "ORDER BY [authors].[name]" + assert_includes posts.to_sql, "ORDER BY [authors].[name]" + assert_equal 3, posts.length + end +end diff --git a/test/cases/index_test_sqlserver.rb b/test/cases/index_test_sqlserver.rb new file mode 100644 index 000000000..6e3a72082 --- /dev/null +++ b/test/cases/index_test_sqlserver.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" + +class IndexTestSQLServer < ActiveRecord::TestCase + before do + connection.create_table(:testings) do |t| + t.column :foo, :string, limit: 100 + t.column :bar, :string, limit: 100 + t.string :first_name + t.string :last_name, limit: 100 + t.string :key, limit: 100 + t.boolean :administrator + end + end + + after do + connection.drop_table :testings + rescue + nil + end + + it "add index with order" do + assert_queries_match(/CREATE.*INDEX.*\(\[last_name\] DESC\)/i) do + connection.add_index "testings", ["last_name"], order: {last_name: :desc} + connection.remove_index "testings", ["last_name"] + end + assert_queries_match(/CREATE.*INDEX.*\(\[last_name\] DESC, \[first_name\]\)/i) do + connection.add_index "testings", ["last_name", "first_name"], order: {last_name: :desc} + connection.remove_index "testings", ["last_name", "first_name"] + end + assert_queries_match(/CREATE.*INDEX.*\(\[last_name\] DESC, \[first_name\] ASC\)/i) do + connection.add_index "testings", ["last_name", "first_name"], order: {last_name: :desc, first_name: :asc} + connection.remove_index "testings", ["last_name", "first_name"] + end + end + + it "add index with where" do + assert_queries_match(/CREATE.*INDEX.*\(\[last_name\]\) WHERE \[first_name\] = N'john doe'/i) do + connection.add_index "testings", "last_name", where: "[first_name] = N'john doe'" + connection.remove_index "testings", "last_name" + end + end + + it "add index with expression" do + assert_nothing_raised do + connection.execute "ALTER TABLE [testings] ADD [first_name_upper] AS UPPER([first_name])" + connection.add_index "testings", "first_name_upper" + end + end +end diff --git a/test/cases/inheritance_test_sqlserver.rb b/test/cases/inheritance_test_sqlserver.rb deleted file mode 100644 index 9b4bc021f..000000000 --- a/test/cases/inheritance_test_sqlserver.rb +++ /dev/null @@ -1,37 +0,0 @@ -require 'cases/sqlserver_helper' -require 'models/company' -require 'models/project' -require 'models/subscriber' - -class InheritanceTestSqlserver < ActiveRecord::TestCase -end - -class InheritanceTest < ActiveRecord::TestCase - - fixtures :companies, :projects, :subscribers, :accounts - - COERCED_TESTS = [ - :test_a_bad_type_column, - :test_eager_load_belongs_to_primary_key_quoting - ] - - include SqlserverCoercedTest - - def test_coerced_a_bad_type_column - Company.connection.execute "SET IDENTITY_INSERT [companies] ON" - Company.connection.insert "INSERT INTO companies ([id], [type], [name]) VALUES(100, N'bad_class!', N'Not happening')" - Company.connection.execute "SET IDENTITY_INSERT 
[companies] OFF" - assert_raise(ActiveRecord::SubclassNotFound) { Company.find(100) } - end - - def test_coerced_eager_load_belongs_to_primary_key_quoting - con = Account.connection - assert_sql(/\[companies\]\.\[id\] IN \(N''1''\)/) do - Account.find(1, :include => :firm) - end - end - - -end - - diff --git a/test/cases/insert_all_test_sqlserver.rb b/test/cases/insert_all_test_sqlserver.rb new file mode 100644 index 000000000..ee17d5500 --- /dev/null +++ b/test/cases/insert_all_test_sqlserver.rb @@ -0,0 +1,20 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" +require "models/sqlserver/recurring_task" + +class InsertAllTestSQLServer < ActiveRecord::TestCase + test "upsert_all recording of timestamps works with mixed datatypes" do + task = RecurringTask.create!( + key: "abcdef", + priority: 5 + ) + + RecurringTask.upsert_all([{ + id: task.id, + priority: nil + }]) + + assert_not_equal task.updated_at, RecurringTask.find(task.id).updated_at + end +end diff --git a/test/cases/json_test_sqlserver.rb b/test/cases/json_test_sqlserver.rb new file mode 100644 index 000000000..a1691b861 --- /dev/null +++ b/test/cases/json_test_sqlserver.rb @@ -0,0 +1,32 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" + +if ActiveRecord::Base.lease_connection.supports_json? + class JsonTestSQLServer < ActiveRecord::TestCase + before do + @o1 = SSTestDatatypeMigrationJson.create! json_col: {"a" => "a", "b" => "b", "c" => "c"} + @o2 = SSTestDatatypeMigrationJson.create! json_col: {"a" => nil, "b" => "b", "c" => "c"} + @o3 = SSTestDatatypeMigrationJson.create! json_col: {"x" => 1, "y" => 2, "z" => 3} + @o4 = SSTestDatatypeMigrationJson.create! json_col: {"array" => [1, 2, 3]} + @o5 = SSTestDatatypeMigrationJson.create! json_col: nil + end + + it "can return and save JSON data" do + _(SSTestDatatypeMigrationJson.find(@o1.id).json_col).must_equal({"a" => "a", "b" => "b", "c" => "c"}) + @o1.json_col = {"a" => "a"} + _(@o1.json_col).must_equal({"a" => "a"}) + @o1.save! 
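+ # Reloading should round-trip the exact JSON document that was saved.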
+ _(@o1.reload.json_col).must_equal({"a" => "a"}) + end + + it "can use ISJSON function" do + _(SSTestDatatypeMigrationJson.where("ISJSON(json_col) > 0").count).must_equal 4 + _(SSTestDatatypeMigrationJson.where("ISJSON(json_col) IS NULL").count).must_equal 1 + end + + it "can use JSON_VALUE function" do + _(SSTestDatatypeMigrationJson.where("JSON_VALUE(json_col, '$.b') = 'b'").count).must_equal 2 + end + end +end diff --git a/test/cases/lateral_test_sqlserver.rb b/test/cases/lateral_test_sqlserver.rb new file mode 100644 index 000000000..c672576b3 --- /dev/null +++ b/test/cases/lateral_test_sqlserver.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" +require "models/post" +require "models/author" + +class LateralTestSQLServer < ActiveRecord::TestCase + fixtures :posts, :authors + + it "uses OUTER APPLY for OUTER JOIN LATERAL" do + post = Arel::Table.new(:posts) + author = Arel::Table.new(:authors) + subselect = post.project(Arel.star).take(1).where(post[:author_id].eq(author[:id])).where(post[:id].eq(42)) + + one = Arel::Nodes::Quoted.new(1) + eq = Arel::Nodes::Equality.new(one, one) + + sql = author.project(Arel.star).where(author[:name].matches("David")).outer_join(subselect.lateral.as("bar")).on(eq).to_sql + results = ActiveRecord::Base.lease_connection.exec_query sql + assert_equal sql, "SELECT * FROM [authors] OUTER APPLY (SELECT * FROM [posts] WHERE [posts].[author_id] = [authors].[id] AND [posts].[id] = 42 ORDER BY [posts].[id] ASC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY) AS bar WHERE [authors].[name] LIKE N'David'" + assert_equal results.length, 1 + end + + it "uses CROSS APPLY for INNER JOIN LATERAL" do + post = Arel::Table.new(:posts) + author = Arel::Table.new(:authors) + subselect = post.project(Arel.star).take(1).where(post[:author_id].eq(author[:id])).where(post[:id].eq(42)) + + sql = author.project(Arel.star).where(author[:name].matches("David")).join(subselect.lateral.as("bar")).to_sql + results = ActiveRecord::Base.lease_connection.exec_query sql + + assert_equal sql, "SELECT * FROM [authors] CROSS APPLY (SELECT * FROM [posts] WHERE [posts].[author_id] = [authors].[id] AND [posts].[id] = 42 ORDER BY [posts].[id] ASC OFFSET 0 ROWS FETCH NEXT 1 ROWS ONLY) AS bar WHERE [authors].[name] LIKE N'David'" + assert_equal results.length, 0 + end +end diff --git a/test/cases/method_scoping_test_sqlserver.rb b/test/cases/method_scoping_test_sqlserver.rb deleted file mode 100644 index 921b46133..000000000 --- a/test/cases/method_scoping_test_sqlserver.rb +++ /dev/null @@ -1,29 +0,0 @@ -require 'cases/sqlserver_helper' -require 'models/developer' - -class MethodScopingTestSqlServer < ActiveRecord::TestCase -end - -class NestedScopingTest < ActiveRecord::TestCase - - COERCED_TESTS = [:test_merged_scoped_find] - - include SqlserverCoercedTest - - fixtures :developers - - - def test_coerced_test_merged_scoped_find - poor_jamis = developers(:poor_jamis) - Developer.send(:with_scope, :find => { :conditions => "salary < 100000" }) do - Developer.send(:with_scope, :find => { :offset => 1, :order => 'id asc' }) do - assert_sql /ORDER BY id asc/i do - assert_equal(poor_jamis, Developer.find(:first, :order => 'id asc')) - end - end - end - end - -end - - diff --git a/test/cases/migration_test_sqlserver.rb b/test/cases/migration_test_sqlserver.rb index eca1f3364..b829c8632 100644 --- a/test/cases/migration_test_sqlserver.rb +++ b/test/cases/migration_test_sqlserver.rb @@ -1,84 +1,136 @@ -require 'cases/sqlserver_helper' -require 'models/person' +# 
frozen_string_literal: true -class MigrationTestSqlserver < ActiveRecord::TestCase - - def setup - @connection = ActiveRecord::Base.connection - end - - context 'For transactions' do - - setup do - @trans_test_table1 = 'sqlserver_trans_table1' - @trans_test_table2 = 'sqlserver_trans_table2' - @trans_tables = [@trans_test_table1,@trans_test_table2] +require "cases/helper_sqlserver" +require "models/person" + +class MigrationTestSQLServer < ActiveRecord::TestCase + describe "For transactions" do + before do + @trans_test_table1 = "sqlserver_trans_table1" + @trans_test_table2 = "sqlserver_trans_table2" + @trans_tables = [@trans_test_table1, @trans_test_table2] end - - teardown do + + after do @trans_tables.each do |table_name| - ActiveRecord::Migration.drop_table(table_name) if @connection.tables.include?(table_name) + ActiveRecord::Migration.drop_table(table_name) if connection.tables.include?(table_name) end end - - should 'not create a tables if error in migrations' do + + it "not create a tables if error in migrations" do begin - ActiveRecord::Migrator.up(SQLSERVER_MIGRATIONS_ROOT+'/transaction_table') - rescue Exception => e - assert_match %r|this and all later migrations canceled|, e.message + migrations_dir = File.join ARTest::SQLServer.migrations_root, "transaction_table" + quietly { ActiveRecord::MigrationContext.new(migrations_dir).up } + rescue => e + assert_match %r{this and all later migrations canceled}, e.message end - assert_does_not_contain @trans_test_table1, @connection.tables - assert_does_not_contain @trans_test_table2, @connection.tables + _(connection.tables).wont_include @trans_test_table1 + _(connection.tables).wont_include @trans_test_table2 end - end - - context 'For changing column' do - - should 'not raise exception when column contains default constraint' do - lock_version_column = Person.columns_hash['lock_version'] + + describe "For changing column" do + it "not raise exception when column contains default constraint" do + lock_version_column = Person.columns_hash["lock_version"] assert_equal :integer, lock_version_column.type assert lock_version_column.default.present? - assert_nothing_raised { @connection.change_column 'people', 'lock_version', :string } + assert_nothing_raised { connection.change_column "people", "lock_version", :string } Person.reset_column_information - lock_version_column = Person.columns_hash['lock_version'] + lock_version_column = Person.columns_hash["lock_version"] assert_equal :string, lock_version_column.type assert lock_version_column.default.nil? 
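+ # Changing the column type back to integer should also succeed.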
+ assert_nothing_raised { connection.change_column "people", "lock_version", :integer } + Person.reset_column_information end - - should 'not drop the default contraint if just renaming' do - find_default = lambda do - @connection.execute_procedure(:sp_helpconstraint, 'defaults', 'nomsg').select do |row| - row['constraint_type'] == "DEFAULT on column decimal_number" - end.last + + it "not drop the default constraint if just renaming" do + find_default = lambda do + connection.execute_procedure(:sp_helpconstraint, "sst_string_defaults", "nomsg").reverse.find do |row| + row["constraint_type"] == "DEFAULT on column string_with_pretend_paren_three" + end end default_before = find_default.call - @connection.change_column :defaults, :decimal_number, :decimal, :precision => 4 + connection.change_column :sst_string_defaults, :string_with_pretend_paren_three, :string, limit: 255 default_after = find_default.call assert default_after - assert_equal default_before['constraint_keys'], default_after['constraint_keys'] + assert_equal default_before["constraint_keys"], default_after["constraint_keys"] + end + + it "change limit" do + assert_nothing_raised { connection.change_column :people, :lock_version, :integer, limit: 8 } + end + + it "change null and default" do + assert_nothing_raised { connection.change_column :people, :first_name, :text, null: true, default: nil } + end + + it "change collation" do + assert_nothing_raised { connection.change_column :sst_string_collation, :string_with_collation, :varchar, collation: :SQL_Latin1_General_CP437_BIN } + + SstStringCollation.reset_column_information + assert_equal "SQL_Latin1_General_CP437_BIN", SstStringCollation.columns_hash["string_with_collation"].collation end - end - -end -if ActiveRecord::TestCase.sqlserver_azure? 
- class MigrationTest < ActiveRecord::TestCase - COERCED_TESTS = [:test_migrator_db_has_no_schema_migrations_table] - include SqlserverCoercedTest - def test_coerced_test_migrator_db_has_no_schema_migrations_table ; assert true ; end + describe "#create_schema" do + it "creates a new schema" do + connection.create_schema("some schema") + + schemas = connection.exec_query("select name from sys.schemas").to_a + + assert_includes schemas, {"name" => "some schema"} + end + + it "creates a new schema with an owner" do + connection.create_schema("some schema", :guest) + + schemas = connection.exec_query("select name, principal_id from sys.schemas").to_a + + assert_includes schemas, {"name" => "some schema", "principal_id" => 2} + end end -end -class ChangeTableMigrationsTest < ActiveRecord::TestCase - COERCED_TESTS = [:test_string_creates_string_column] - include SqlserverCoercedTest - def test_coerced_string_creates_string_column - with_change_table do |t| - @connection.expects(:add_column).with(:delete_me, :foo, 'nvarchar(255)', {}) - @connection.expects(:add_column).with(:delete_me, :bar, 'nvarchar(255)', {}) - t.string :foo, :bar + describe "#change_table_schema" do + before { connection.create_schema("foo") } + + it "transfer the given table to the given schema" do + connection.change_table_schema("foo", "orders") + + assert connection.data_source_exists?("foo.orders") end end -end \ No newline at end of file + + describe "#drop_schema" do + before { connection.create_schema("some schema") } + + it "drops a schema" do + schemas = connection.exec_query("select name from sys.schemas").to_a + + assert_includes schemas, {"name" => "some schema"} + + connection.drop_schema("some schema") + + schemas = connection.exec_query("select name from sys.schemas").to_a + + refute_includes schemas, {"name" => "some schema"} + end + end + + describe "creating stored procedure" do + it "stored procedure contains inserts are created successfully" do + sql = <<-SQL + CREATE OR ALTER PROCEDURE do_some_task + AS + IF NOT EXISTS(SELECT * FROM sys.objects WHERE type = 'U' AND name = 'SomeTableName') + BEGIN + CREATE TABLE SomeTableName (SomeNum int PRIMARY KEY CLUSTERED); + INSERT INTO SomeTableName(SomeNum) VALUES(1); + END + SQL + + assert_nothing_raised { connection.execute(sql) } + ensure + connection.execute("DROP PROCEDURE IF EXISTS dbo.do_some_task;") + end + end +end diff --git a/test/cases/offset_and_limit_test_sqlserver.rb b/test/cases/offset_and_limit_test_sqlserver.rb deleted file mode 100644 index 2570d4cc5..000000000 --- a/test/cases/offset_and_limit_test_sqlserver.rb +++ /dev/null @@ -1,141 +0,0 @@ -require 'cases/sqlserver_helper' -require 'models/job' -require 'models/person' -require 'models/reference' -require 'models/book' -require 'models/author' -require 'models/subscription' -require 'models/post' -require 'models/comment' -require 'models/categorization' - -class OffsetAndLimitTestSqlserver < ActiveRecord::TestCase - - fixtures :jobs, :people, :references, :subscriptions, - :authors, :posts, :comments, :categorizations - - setup :create_10_books - teardown :destroy_all_books - - - context 'When selecting with limit' do - - should 'alter sql to limit number of records returned' do - assert_sql(/SELECT TOP \(10\)/) { Book.limit(10).all } - end - - end - - context 'When selecting with offset' do - - should 'have limit (top) of 9223372036854775807 if only offset is passed' do - assert_sql(/SELECT TOP \(9223372036854775807\) \[__rnt\]\.\* FROM.*WHERE \[__rnt\]\.\[__rn\] > \(1\)/) { 
Book.all(:offset=>1) } - end - - should 'support calling exists?' do - assert Book.offset(3).exists? - end - - end - - context 'When selecting with limit and offset' do - - should 'work with fully qualified table and columns in select' do - books = Book.all :select => 'books.id, books.name', :limit => 3, :offset => 5 - assert_equal Book.all[5,3].map(&:id), books.map(&:id) - end - - # ActiveRecord Regression 3.2.3? - # https://github.com/rails/rails/commit/a2c2f406612a1855fbc6fe816cf3e15b4ef531d3#commitcomment-1208811 - should_eventually 'allow sql literal for offset' do - assert_sql(/WHERE \[__rnt\]\.\[__rn\] > \(3-2\)/) { Book.limit(10).offset(Arel::Nodes::Ascending.new('3-2')).all } - assert_sql(/WHERE \[__rnt\]\.\[__rn\] > \(SELECT 8 AS \[count\]\)/) do - books = Book.all :limit => 3, :offset => Arel.sql('SELECT 8 AS [count]') - assert_equal 2, books.size, 'remember there are only 10 books and offset is 8' - end - end - - # ActiveRecord Regression 3.2.3? - # https://github.com/rails/rails/commit/a2c2f406612a1855fbc6fe816cf3e15b4ef531d3#commitcomment-1208811 - should_eventually 'not convert strings which look like integers to integers' do - assert_sql(/WHERE \[__rnt\]\.\[__rn\] > \(N''5''\)/) { Book.limit(10).offset('5').all } - end - - should 'alter SQL to limit number of records returned offset by specified amount' do - sql = %|EXEC sp_executesql N'SELECT TOP (3) [__rnt].* FROM ( SELECT ROW_NUMBER() OVER (ORDER BY [books].[id] ASC) AS [__rn], [books].* FROM [books] ) AS [__rnt] WHERE [__rnt].[__rn] > (5) ORDER BY [__rnt].[__rn] ASC'| - assert_sql(sql) { Book.limit(3).offset(5).all } - end - - should 'add locks to deepest sub select' do - pattern = /FROM \[books\]\s+WITH \(NOLOCK\)/ - assert_sql(pattern) { Book.all :limit => 3, :offset => 5, :lock => 'WITH (NOLOCK)' } - assert_sql(pattern) { Book.count :limit => 3, :offset => 5, :lock => 'WITH (NOLOCK)' } - end - - should 'have valid sort order' do - order_row_numbers = SqlServerOrderRowNumber.offset(7).order("c DESC").select("c, ROW_NUMBER() OVER (ORDER BY c ASC) AS [dummy]").all.map(&:c) - assert_equal [2, 1, 0], order_row_numbers - end - - should 'work with through associations' do - assert_equal people(:david), jobs(:unicyclist).people.limit(1).offset(1).all.first - end - - should 'work with through uniq associations' do - david = authors(:david) - mary = authors(:mary) - thinking = posts(:thinking) - # Mary has duplicate categorizations to the thinking post. - assert_equal [thinking, thinking], mary.categorized_posts.all - assert_equal [thinking], mary.unique_categorized_posts.limit(2).offset(0) - # Paging thru David's uniq ordered comments, with count too. 
- assert_equal [1, 2, 3, 5, 6, 7, 8, 9, 10, 12], david.ordered_uniq_comments.map(&:id) - assert_equal [3, 5], david.ordered_uniq_comments.limit(2).offset(2).map(&:id) - assert_equal 2, david.ordered_uniq_comments.limit(2).offset(2).count - assert_equal [8, 9, 10, 12], david.ordered_uniq_comments.limit(5).offset(6).map(&:id) - assert_equal 4, david.ordered_uniq_comments.limit(5).offset(6).count - end - - should 'remove [__rnt] table names from relation reflection and hence do not eager loading' do - create_10_books - create_10_books - assert_queries(1) { Book.limit(10).offset(10).includes(:subscriptions).all } - end - - - context 'with count' do - - should 'pass a gauntlet of window tests' do - Book.first.destroy - Book.first.destroy - Book.first.destroy - assert_equal 7, Book.count - assert_equal 1, Book.limit(1).offset(1).size - assert_equal 1, Book.limit(1).offset(5).size - assert_equal 1, Book.limit(1).offset(6).size - assert_equal 0, Book.limit(1).offset(7).size - assert_equal 3, Book.limit(3).offset(4).size - assert_equal 2, Book.limit(3).offset(5).size - assert_equal 1, Book.limit(3).offset(6).size - assert_equal 0, Book.limit(3).offset(7).size - assert_equal 0, Book.limit(3).offset(8).size - end - - end - - end - - - protected - - def create_10_books - Book.delete_all - @books = (1..10).map {|i| Book.create!} - end - - def destroy_all_books - @books.each { |b| b.destroy } - end - -end - diff --git a/test/cases/optimizer_hints_test_sqlserver.rb b/test/cases/optimizer_hints_test_sqlserver.rb new file mode 100644 index 000000000..fde5e9144 --- /dev/null +++ b/test/cases/optimizer_hints_test_sqlserver.rb @@ -0,0 +1,80 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" +require "models/company" + +class OptimizerHitsTestSQLServer < ActiveRecord::TestCase + fixtures :companies + + it "apply optimizations" do + assert_queries_match(%r{\ASELECT .+ FROM .+ OPTION \(HASH GROUP\)\z}) do + companies = Company.optimizer_hints("HASH GROUP") + companies = companies.distinct.select("firm_id") + assert_includes companies.explain.inspect, "| Hash Match | Aggregate |" + end + + assert_queries_match(%r{\ASELECT .+ FROM .+ OPTION \(ORDER GROUP\)\z}) do + companies = Company.optimizer_hints("ORDER GROUP") + companies = companies.distinct.select("firm_id") + assert_includes companies.explain.inspect, "| Stream Aggregate | Aggregate |" + end + end + + it "apply multiple optimizations" do + assert_queries_match(%r{\ASELECT .+ FROM .+ OPTION \(HASH GROUP, FAST 1\)\z}) do + companies = Company.optimizer_hints("HASH GROUP", "FAST 1") + companies = companies.distinct.select("firm_id") + assert_includes companies.explain.inspect, "| Hash Match | Flow Distinct |" + end + end + + it "support subqueries" do + assert_queries_match(%r{SELECT COUNT\(count_column\) FROM \(SELECT .*\) subquery_for_count OPTION \(MAXDOP 2\)}) do + companies = Company.optimizer_hints("MAXDOP 2") + companies = companies.select(:id).where(firm_id: [0, 1]).limit(3) + assert_equal 3, companies.count + end + end + + it "support order" do + assert_queries_match(%r{\ASELECT .+ FROM .+ ORDER .+ OPTION .+\z}) do + companies = Company.optimizer_hints("LABEL='FindCompanies'") + companies = companies.order(:id) + companies.to_a + end + end + + it "sanitize values" do + assert_queries_match(%r{\ASELECT .+ FROM .+ OPTION \(HASH GROUP\)\z}) do + companies = Company.optimizer_hints("OPTION (HASH GROUP)") + companies = companies.distinct.select("firm_id") + companies.to_a + end + + assert_queries_match(%r{\ASELECT .+ FROM .+ OPTION 
\(HASH GROUP\)\z}) do + companies = Company.optimizer_hints("OPTION(HASH GROUP)") + companies = companies.distinct.select("firm_id") + companies.to_a + end + + assert_queries_match(%r{\ASELECT .+ FROM .+ OPTION \(TABLE HINT \(\[companies\], INDEX\(1\)\)\)\z}) do + companies = Company.optimizer_hints("OPTION(TABLE HINT ([companies], INDEX(1)))") + companies = companies.distinct.select("firm_id") + companies.to_a + end + + assert_queries_match(%r{\ASELECT .+ FROM .+ OPTION \(HASH GROUP\)\z}) do + companies = Company.optimizer_hints("Option(HASH GROUP)") + companies = companies.distinct.select("firm_id") + companies.to_a + end + end + + it "skip optimization after unscope" do + assert_queries_match("SELECT DISTINCT [companies].[firm_id] FROM [companies]") do + companies = Company.optimizer_hints("HASH GROUP") + companies = companies.distinct.select("firm_id") + companies.unscope(:optimizer_hints).load + end + end +end diff --git a/test/cases/order_test_sqlserver.rb b/test/cases/order_test_sqlserver.rb index 0ba71d995..82f43d539 100644 --- a/test/cases/order_test_sqlserver.rb +++ b/test/cases/order_test_sqlserver.rb @@ -1,148 +1,153 @@ -require 'cases/sqlserver_helper' -require 'models/post' +# frozen_string_literal: true -class OrderTestSqlserver < ActiveRecord::TestCase - +require "cases/helper_sqlserver" +require "models/post" + +class OrderTestSQLServer < ActiveRecord::TestCase fixtures :posts - - context 'Order by' do - - should 'not mangel complex order clauses' do - xyz_order = "CASE WHEN [title] LIKE N'XYZ%' THEN 0 ELSE 1 END" - xyz_post = Post.create :title => 'XYZ Post', :body => 'Test cased orders.' - assert_equal xyz_post, Post.order(Arel::Nodes::Ordering.new(Arel.sql(xyz_order))).first - end - - should 'support column' do - order = "title" - post1 = Post.create :title => 'AAA Post', :body => 'Test cased orders.' - assert_equal post1, Post.order(order).first - end - - should 'support column ASC' do - order = "title ASC" - post1 = Post.create :title => 'AAA Post', :body => 'Test cased orders.' - assert_equal post1, Post.order(order).first - end - - should 'support column DESC' do - order = "title DESC" - post1 = Post.create :title => 'ZZZ Post', :body => 'Test cased orders.' - assert_equal post1, Post.order(order).first - end - - should 'support column as symbol' do - order = :title - post1 = Post.create :title => 'AAA Post', :body => 'Test cased orders.' - assert_equal post1, Post.order(order).first - end - - should 'support table and column' do - order = "posts.title" - post1 = Post.create :title => 'AAA Post', :body => 'Test cased orders.' - assert_equal post1, Post.order(order).first - end - - should 'support quoted column' do - order = "[title]" - post1 = Post.create :title => 'AAA Post', :body => 'Test cased orders.' - assert_equal post1, Post.order(order).first - end - - should 'support quoted table and column' do - order = "[posts].[title]" - post1 = Post.create :title => 'AAA Post', :body => 'Test cased orders.' - assert_equal post1, Post.order(order).first - end - - should 'support primary: column, secondary: column' do - order = "title DESC, body" - post1 = Post.create :title => 'ZZZ Post', :body => 'Test cased orders.' - post2 = Post.create :title => 'ZZZ Post', :body => 'ZZZ Test cased orders.' - assert_equal post1, Post.order(order).first - assert_equal post2, Post.order(order).second - end - - should 'support primary: table and column, secondary: column' do - order = "posts.title DESC, body" - post1 = Post.create :title => 'ZZZ Post', :body => 'Test cased orders.' 
- post2 = Post.create :title => 'ZZZ Post', :body => 'ZZZ Test cased orders.' - assert_equal post1, Post.order(order).first - assert_equal post2, Post.order(order).second - end - - should 'support primary: case expression, secondary: column' do - order = "(CASE WHEN [title] LIKE N'ZZZ%' THEN title ELSE '' END) DESC, body" - post1 = Post.create :title => 'ZZZ Post', :body => 'Test cased orders.' - post2 = Post.create :title => 'ZZZ Post', :body => 'ZZZ Test cased orders.' - assert_equal post1, Post.order(order).first - assert_equal post2, Post.order(order).second - end - - should 'support primary: quoted table and column, secondary: case expresion' do - order = "[posts].[body] DESC, (CASE WHEN [title] LIKE N'ZZZ%' THEN title ELSE '' END) DESC" - post1 = Post.create :title => 'ZZZ Post', :body => 'ZZZ Test cased orders.' - post2 = Post.create :title => 'ZZY Post', :body => 'ZZZ Test cased orders.' - assert_equal post1, Post.order(order).first - assert_equal post2, Post.order(order).second - end - - should 'support inline function' do - order = "LEN(title)" - post1 = Post.create :title => 'A', :body => 'AAA Test cased orders.' - assert_equal post1, Post.order(order).first - end - - should 'support inline function with parameters' do - order = "SUBSTRING(title, 1, 3)" - post1 = Post.create :title => 'AAA Post', :body => 'Test cased orders.' - assert_equal post1, Post.order(order).first - end - - should 'support inline function with parameters DESC' do - order = "SUBSTRING(title, 1, 3) DESC" - post1 = Post.create :title => 'ZZZ Post', :body => 'Test cased orders.' - assert_equal post1, Post.order(order).first - end - - should 'support primary: inline function, secondary: column' do - order = "LEN(title), body" - post1 = Post.create :title => 'A', :body => 'AAA Test cased orders.' - post2 = Post.create :title => 'A', :body => 'Test cased orders.' - assert_equal post1, Post.order(order).first - assert_equal post2, Post.order(order).second - end - - should 'support primary: inline function, secondary: column with direction' do - order = "LEN(title) ASC, body DESC" - post1 = Post.create :title => 'A', :body => 'ZZZ Test cased orders.' - post2 = Post.create :title => 'A', :body => 'Test cased orders.' - assert_equal post1, Post.order(order).first - assert_equal post2, Post.order(order).second - end - - should 'support primary: column, secondary: inline function' do - order = "body DESC, LEN(title)" - post1 = Post.create :title => 'Post', :body => 'ZZZ Test cased orders.' - post2 = Post.create :title => 'Longer Post', :body => 'ZZZ Test cased orders.' - assert_equal post1, Post.order(order).first - assert_equal post2, Post.order(order).second - end - - should 'support primary: case expression, secondary: inline function' do - order = "CASE WHEN [title] LIKE N'ZZZ%' THEN title ELSE '' END DESC, LEN(body) ASC" - post1 = Post.create :title => 'ZZZ Post', :body => 'Z' - post2 = Post.create :title => 'ZZZ Post', :body => 'Test cased orders.' 
- assert_equal post1, Post.order(order).first - assert_equal post2, Post.order(order).second - end - - should 'support primary: inline function, secondary: case expression' do - order = "LEN(body), CASE WHEN [title] LIKE N'ZZZ%' THEN title ELSE '' END DESC" - post1 = Post.create :title => 'ZZZ Post', :body => 'Z' - post2 = Post.create :title => 'Post', :body => 'Z' - assert_equal post1, Post.order(order).first - assert_equal post2, Post.order(order).second - end + + it "not mangel complex order clauses" do + xyz_order = "CASE WHEN [title] LIKE N'XYZ%' THEN 0 ELSE 1 END" + xyz_post = Post.create title: "XYZ Post", body: "Test cased orders." + assert_equal xyz_post, Post.order(Arel.sql(xyz_order)).first + end + + it "support column" do + order = "title" + post1 = Post.create title: "AAA Post", body: "Test cased orders." + assert_equal post1, Post.order(order).first + end + + it "support column ASC" do + order = "title ASC" + post1 = Post.create title: "AAA Post", body: "Test cased orders." + assert_equal post1, Post.order(order).first + end + + it "support column DESC" do + order = "title DESC" + post1 = Post.create title: "ZZZ Post", body: "Test cased orders." + assert_equal post1, Post.order(order).first + end + + it "support column as symbol" do + order = :title + post1 = Post.create title: "AAA Post", body: "Test cased orders." + assert_equal post1, Post.order(order).first + end + + it "support table and column" do + order = "posts.title" + post1 = Post.create title: "AAA Post", body: "Test cased orders." + assert_equal post1, Post.order(order).first + end + + it "support quoted column" do + order = "[title]" + post1 = Post.create title: "AAA Post", body: "Test cased orders." + assert_equal post1, Post.order(Arel.sql(order)).first + end + + it "support quoted table and column" do + order = "[posts].[title]" + post1 = Post.create title: "AAA Post", body: "Test cased orders." + assert_equal post1, Post.order(Arel.sql(order)).first + end + + it "support primary: column, secondary: column" do + order = "title DESC, body" + post1 = Post.create title: "ZZZ Post", body: "Test cased orders." + post2 = Post.create title: "ZZZ Post", body: "ZZZ Test cased orders." + assert_equal post1, Post.order(order).first + assert_equal post2, Post.order(order).second + end + + it "support primary: table and column, secondary: column" do + order = "posts.title DESC, body" + post1 = Post.create title: "ZZZ Post", body: "Test cased orders." + post2 = Post.create title: "ZZZ Post", body: "ZZZ Test cased orders." + assert_equal post1, Post.order(order).first + assert_equal post2, Post.order(order).second + end + + it "support primary: case expression, secondary: column" do + order = "(CASE WHEN [title] LIKE N'ZZZ%' THEN title ELSE '' END) DESC, body" + post1 = Post.create title: "ZZZ Post", body: "Test cased orders." + post2 = Post.create title: "ZZZ Post", body: "ZZZ Test cased orders." + assert_equal post1, Post.order(Arel.sql(order)).first + assert_equal post2, Post.order(Arel.sql(order)).second + end + + it "support primary: quoted table and column, secondary: case expresion" do + order = "[posts].[body] DESC, (CASE WHEN [title] LIKE N'ZZZ%' THEN title ELSE '' END) DESC" + post1 = Post.create title: "ZZZ Post", body: "ZZZ Test cased orders." + post2 = Post.create title: "ZZY Post", body: "ZZZ Test cased orders." 
+ assert_equal post1, Post.order(Arel.sql(order)).first + assert_equal post2, Post.order(Arel.sql(order)).second + end + + it "support inline function" do + order = "LEN(title)" + post1 = Post.create title: "A", body: "AAA Test cased orders." + assert_equal post1, Post.order(Arel.sql(order)).first + end + + it "support inline function with parameters" do + order = "SUBSTRING(title, 1, 3)" + post1 = Post.create title: "AAA Post", body: "Test cased orders." + assert_equal post1, Post.order(Arel.sql(order)).first + end + + it "support inline function with parameters DESC" do + order = "SUBSTRING(title, 1, 3) DESC" + post1 = Post.create title: "ZZZ Post", body: "Test cased orders." + assert_equal post1, Post.order(Arel.sql(order)).first + end + + it "support primary: inline function, secondary: column" do + order = "LEN(title), body" + post1 = Post.create title: "A", body: "AAA Test cased orders." + post2 = Post.create title: "A", body: "Test cased orders." + assert_equal post1, Post.order(Arel.sql(order)).first + assert_equal post2, Post.order(Arel.sql(order)).second + end + + it "support primary: inline function, secondary: column with direction" do + order = "LEN(title) ASC, body DESC" + post1 = Post.create title: "A", body: "ZZZ Test cased orders." + post2 = Post.create title: "A", body: "Test cased orders." + assert_equal post1, Post.order(Arel.sql(order)).first + assert_equal post2, Post.order(Arel.sql(order)).second + end + + it "support primary: column, secondary: inline function" do + order = "body DESC, LEN(title)" + post1 = Post.create title: "Post", body: "ZZZ Test cased orders." + post2 = Post.create title: "Longer Post", body: "ZZZ Test cased orders." + assert_equal post1, Post.order(Arel.sql(order)).first + assert_equal post2, Post.order(Arel.sql(order)).second + end + + it "support primary: case expression, secondary: inline function" do + order = "CASE WHEN [title] LIKE N'ZZZ%' THEN title ELSE '' END DESC, LEN(body) ASC" + post1 = Post.create title: "ZZZ Post", body: "Z" + post2 = Post.create title: "ZZZ Post", body: "Test cased orders." 
+ assert_equal post1, Post.order(Arel.sql(order)).first + assert_equal post2, Post.order(Arel.sql(order)).second + end + + it "support primary: inline function, secondary: case expression" do + order = "LEN(body), CASE WHEN [title] LIKE N'ZZZ%' THEN title ELSE '' END DESC" + post1 = Post.create title: "ZZZ Post", body: "Z" + post2 = Post.create title: "Post", body: "Z" + assert_equal post1, Post.order(Arel.sql(order)).first + assert_equal post2, Post.order(Arel.sql(order)).second + end + + # Executing this kind of queries will raise "A column has been specified more than once in the order by list" + # This test shows that we don't do anything to prevent this + it "doesn't deduplicate semantically equal orders" do + sql = Post.order(:id).order("posts.id ASC").to_sql + assert_equal "SELECT [posts].* FROM [posts] ORDER BY [posts].[id] ASC, posts.id ASC", sql end end diff --git a/test/cases/persistence_test_sqlserver.rb b/test/cases/persistence_test_sqlserver.rb deleted file mode 100644 index 2ece8a14e..000000000 --- a/test/cases/persistence_test_sqlserver.rb +++ /dev/null @@ -1,50 +0,0 @@ -require 'cases/sqlserver_helper' -require 'models/post' -require 'models/comment' -require 'models/author' -require 'models/topic' -require 'models/reply' -require 'models/category' -require 'models/company' -require 'models/developer' -require 'models/project' -require 'models/minimalistic' -require 'models/warehouse_thing' -require 'models/parrot' -require 'models/minivan' -require 'models/person' -require 'rexml/document' -require 'active_support/core_ext/exception' - -class PersistencesTestSqlserver < ActiveRecord::TestCase -end - -class PersistencesTest < ActiveRecord::TestCase - - fixtures :topics, :companies, :developers, :projects, :computers, :accounts, :minimalistics, 'warehouse-things', :authors, :categorizations, :categories, :posts, :minivans - - COERCED_TESTS = [:test_update_all_doesnt_ignore_order] - - include SqlserverCoercedTest - - def test_coerced_update_all_doesnt_ignore_order - assert_equal authors(:david).id + 1, authors(:mary).id - test_update_with_order_succeeds = lambda do |order| - begin - Author.order(order).update_all('id = id + 1') - rescue ActiveRecord::ActiveRecordError - false - end - end - if test_update_with_order_succeeds.call('id DESC') - assert !test_update_with_order_succeeds.call('id ASC') - else - assert_sql(/UPDATE .* \(SELECT .* ORDER BY id DESC\)/i) do - test_update_with_order_succeeds.call('id DESC') - end - end - end - -end - - diff --git a/test/cases/pessimistic_locking_test_sqlserver.rb b/test/cases/pessimistic_locking_test_sqlserver.rb index fb720f2c4..13640e6c0 100644 --- a/test/cases/pessimistic_locking_test_sqlserver.rb +++ b/test/cases/pessimistic_locking_test_sqlserver.rb @@ -1,78 +1,102 @@ -require 'cases/sqlserver_helper' -require 'models/person' -require 'models/reader' +# frozen_string_literal: true -class PessimisticLockingTestSqlserver < ActiveRecord::TestCase - - self.use_transactional_fixtures = false +require "cases/helper_sqlserver" +require "models/person" +require "models/reader" + +class PessimisticLockingTestSQLServer < ActiveRecord::TestCase fixtures :people, :readers - - def setup - Person.columns; Reader.columns # Avoid introspection queries during tests. 
+ + before do + Person.columns + Reader.columns + end + + it "uses with updlock by default" do + assert_queries_match %r{SELECT \[people\]\.\* FROM \[people\] WITH\(UPDLOCK\)} do + _(Person.lock(true).to_a).must_equal Person.all.to_a + end end - - context 'For simple finds with default lock option' do - should 'lock with simple find' do + describe "For simple finds with default lock option" do + it "lock with simple find" do assert_nothing_raised do Person.transaction do - Person.find 1, :lock => true + _(Person.lock(true).find(1)).must_equal Person.find(1) end end end - should 'lock with scoped find' do + it "lock with scoped find" do assert_nothing_raised do Person.transaction do - Person.send(:with_scope, :find => { :lock => true }) do - Person.find 1 + Person.lock(true).scoping do + _(Person.find(1)).must_equal Person.find(1) end end end end - should 'lock with eager find' do + it "lock with eager find" do assert_nothing_raised do Person.transaction do - Person.find 1, :include => :readers, :lock => true + person = Person.lock(true).includes(:readers).find(1) + _(person).must_equal Person.find(1) end end end - should 'reload with lock when #lock! called' do - assert_nothing_raised do - Person.transaction do - person = Person.find 1 - old, person.first_name = person.first_name, 'fooman' - person.lock! - assert_equal old, person.first_name - end + it "can add a custom lock directive" do + assert_queries_match %r{SELECT \[people\]\.\* FROM \[people\] WITH\(HOLDLOCK, ROWLOCK\)} do + Person.lock("WITH(HOLDLOCK, ROWLOCK)").load end end - - should 'simply add lock to find all' do - assert_sql %r|SELECT \[people\]\.\* FROM \[people\] WITH \(NOLOCK\)| do - Person.all(:lock => 'WITH (NOLOCK)') + + describe "joining tables" do + it "joined tables use updlock by default" do + assert_queries_match %r{SELECT \[people\]\.\* FROM \[people\] WITH\(UPDLOCK\) INNER JOIN \[readers\] WITH\(UPDLOCK\)\s+ON \[readers\]\.\[person_id\] = \[people\]\.\[id\]} do + Person.lock(true).joins(:readers).load + end end - end + it "joined tables can use custom lock directive" do + assert_queries_match %r{SELECT \[people\]\.\* FROM \[people\] WITH\(NOLOCK\) INNER JOIN \[readers\] WITH\(NOLOCK\)\s+ON \[readers\]\.\[person_id\] = \[people\]\.\[id\]} do + Person.lock("WITH(NOLOCK)").joins(:readers).load + end + end + + it "left joined tables use updlock by default" do + assert_queries_match %r{SELECT \[people\]\.\* FROM \[people\] WITH\(UPDLOCK\) LEFT OUTER JOIN \[readers\] WITH\(UPDLOCK\)\s+ON \[readers\]\.\[person_id\] = \[people\]\.\[id\]} do + Person.lock(true).left_joins(:readers).load + end + end + + it "left joined tables can use custom lock directive" do + assert_queries_match %r{SELECT \[people\]\.\* FROM \[people\] WITH\(NOLOCK\) LEFT OUTER JOIN \[readers\] WITH\(NOLOCK\)\s+ON \[readers\]\.\[person_id\] = \[people\]\.\[id\]} do + Person.lock("WITH(NOLOCK)").left_joins(:readers).load + end + end + end end - - context 'For paginated finds' do - - setup do - 20.times { |n| Person.create!(:first_name => "Thing_#{n}") } + + describe "For paginated finds" do + before do + Person.delete_all + 20.times { |n| Person.create!(first_name: "Thing_#{n}") } end - - should 'cope with eager loading un-locked paginated' do - eager_ids_sql = /SELECT TOP \(5\).*FROM \[people\] WITH \(NOLOCK\)/ - loader_sql = /FROM \[people\] WITH \(NOLOCK\).*WHERE \[people\]\.\[id\] IN/ - assert_sql(eager_ids_sql,loader_sql) do - Person.all(:include => :readers, :lock => 'WITH (NOLOCK)', :limit => 5, :offset => 10) + + it "copes with eager loading 
un-locked paginated" do + eager_ids_sql = /SELECT\s+DISTINCT \[people\].\[id\] FROM \[people\] WITH\(UPDLOCK\) LEFT OUTER JOIN \[readers\] WITH\(UPDLOCK\)\s+ON \[readers\].\[person_id\] = \[people\].\[id\]\s+ORDER BY \[people\].\[id\] ASC OFFSET @0 ROWS FETCH NEXT @1 ROWS ONLY/ + loader_sql = /SELECT.*FROM \[people\] WITH\(UPDLOCK\).*WHERE \[people\]\.\[id\] IN/ + + assert_queries_match(/#{eager_ids_sql}|#{loader_sql}/, count: 2) do + people = Person.lock(true).limit(5).offset(10).includes(:readers).references(:readers).to_a + _(people[0].first_name).must_equal "Thing_10" + _(people[1].first_name).must_equal "Thing_11" + _(people[2].first_name).must_equal "Thing_12" + _(people[3].first_name).must_equal "Thing_13" + _(people[4].first_name).must_equal "Thing_14" end end - end - - end diff --git a/test/cases/primary_keys_test_sqlserver.rb b/test/cases/primary_keys_test_sqlserver.rb new file mode 100644 index 000000000..1de09790d --- /dev/null +++ b/test/cases/primary_keys_test_sqlserver.rb @@ -0,0 +1,103 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" +require "support/schema_dumping_helper" + +class PrimaryKeyUuidTypeTest < ActiveRecord::TestCase + include SchemaDumpingHelper + + self.use_transactional_tests = false + + class Barcode < ActiveRecord::Base + end + + setup do + @connection = ActiveRecord::Base.lease_connection + @connection.create_table(:barcodes, primary_key: "code", id: :uuid, force: true) + end + + teardown do + @connection.drop_table(:barcodes, if_exists: true) + end + + def test_any_type_primary_key + assert_equal "code", Barcode.primary_key + + column = Barcode.column_for_attribute(Barcode.primary_key) + assert_not column.null + assert_equal :uuid, column.type + assert_not_predicate column, :is_identity? + assert_predicate column, :is_primary? + ensure + Barcode.reset_column_information + end + + test "schema dump primary key includes default" do + schema = dump_table_schema "barcodes" + assert_match %r/create_table "barcodes", primary_key: "code", id: :uuid, default: -> { "newid\(\)" }/, schema + end +end + +class PrimaryKeyIntegerTest < ActiveRecord::TestCase + include SchemaDumpingHelper + + self.use_transactional_tests = false + + class Barcode < ActiveRecord::Base + end + + class Widget < ActiveRecord::Base + end + + setup do + @connection = ActiveRecord::Base.lease_connection + end + + teardown do + @connection.drop_table :barcodes, if_exists: true + @connection.drop_table :widgets, if_exists: true + end + + test "integer primary key without default" do + @connection.create_table(:widgets, id: :integer, force: true) + column = @connection.columns(:widgets).find { |c| c.name == "id" } + assert_predicate column, :is_primary? + assert_predicate column, :is_identity? + assert_equal :integer, column.type + assert_not_predicate column, :bigint? + + schema = dump_table_schema "widgets" + assert_match %r{create_table "widgets", id: :integer, force: :cascade do}, schema + end + + test "bigint primary key without default" do + @connection.create_table(:widgets, id: :bigint, force: true) + column = @connection.columns(:widgets).find { |c| c.name == "id" } + assert_predicate column, :is_primary? + assert_predicate column, :is_identity? + assert_equal :integer, column.type + assert_predicate column, :bigint? 
+ + schema = dump_table_schema "widgets" + assert_match %r{create_table "widgets", force: :cascade do}, schema + end + + test "don't set identity to integer and bigint when there is a default" do + @connection.create_table(:barcodes, id: :integer, default: nil, force: true) + @connection.create_table(:widgets, id: :bigint, default: nil, force: true) + + column = @connection.columns(:widgets).find { |c| c.name == "id" } + assert_predicate column, :is_primary? + assert_not_predicate column, :is_identity? + + schema = dump_table_schema "widgets" + assert_match %r{create_table "widgets", id: :bigint, default: nil, force: :cascade do}, schema + + column = @connection.columns(:barcodes).find { |c| c.name == "id" } + assert_predicate column, :is_primary? + assert_not_predicate column, :is_identity? + + schema = dump_table_schema "barcodes" + assert_match %r{create_table "barcodes", id: :integer, default: nil, force: :cascade do}, schema + end +end diff --git a/test/cases/query_cache_test_sqlserver.rb b/test/cases/query_cache_test_sqlserver.rb deleted file mode 100644 index b16babc71..000000000 --- a/test/cases/query_cache_test_sqlserver.rb +++ /dev/null @@ -1,22 +0,0 @@ -require 'cases/sqlserver_helper' -require 'models/task' - -class QueryCacheTestSqlserver < ActiveRecord::TestCase -end - -class QueryCacheTest < ActiveRecord::TestCase - - COERCED_TESTS = [:test_cache_does_not_wrap_string_results_in_arrays] - - include SqlserverCoercedTest - - fixtures :tasks - - def test_coerced_cache_does_not_wrap_string_results_in_arrays - Task.cache do - assert_instance_of Fixnum, Task.connection.select_value("SELECT count(*) AS count_all FROM tasks") - end - end - - -end diff --git a/test/cases/rake_test_sqlserver.rb b/test/cases/rake_test_sqlserver.rb new file mode 100644 index 000000000..1a6d7ec8a --- /dev/null +++ b/test/cases/rake_test_sqlserver.rb @@ -0,0 +1,206 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" + +class SQLServerRakeTest < ActiveRecord::TestCase + self.use_transactional_tests = false + + cattr_accessor :azure_skip + self.azure_skip = connection_sqlserver_azure? + + let(:db_tasks) { ActiveRecord::Tasks::DatabaseTasks } + let(:new_database) { "activerecord_unittest_tasks" } + let(:default_configuration) { ARTest.test_configuration_hashes["arunit"] } + let(:configuration) { default_configuration.merge("database" => new_database) } + let(:db_config) { ActiveRecord::Base.configurations.resolve(configuration) } + + before { skip "on azure" if azure_skip } + before { disconnect! unless azure_skip } + after { reconnect unless azure_skip } + + private + + def disconnect! + connection.disconnect! + end + + def reconnect + config = default_configuration + if connection_sqlserver_azure? + ActiveRecord::Base.establish_connection(config.merge("database" => "master")) + begin + connection.drop_database(new_database) + rescue + nil + end + disconnect! 
+ ActiveRecord::Base.establish_connection(config) + else + ActiveRecord::Base.establish_connection(config) + begin + connection.drop_database(new_database) + rescue + nil + end + end + end +end + +class SQLServerRakeCreateTest < SQLServerRakeTest + self.azure_skip = false + + it "establishes connection to database after create " do + quietly { db_tasks.create configuration } + _(connection.current_database).must_equal(new_database) + end + + it "creates database with default collation" do + quietly { db_tasks.create configuration } + _(connection.collation).must_equal "SQL_Latin1_General_CP1_CI_AS" + end + + it "creates database with given collation" do + quietly { db_tasks.create configuration.merge("collation" => "Latin1_General_CI_AS") } + _(connection.collation).must_equal "Latin1_General_CI_AS" + end + + it "prints error message when database exists" do + quietly { db_tasks.create configuration } + message = capture(:stderr) { db_tasks.create configuration } + _(message).must_match %r{activerecord_unittest_tasks.*already exists} + end +end + +class SQLServerRakeDropTest < SQLServerRakeTest + self.azure_skip = false + + it "drops database and uses master" do + quietly do + db_tasks.create configuration + db_tasks.drop configuration + end + _(connection.current_database).must_equal "master" + end + + it "prints error message when database does not exist" do + message = capture(:stderr) { db_tasks.drop configuration.merge("database" => "doesnotexist") } + _(message).must_match %r{'doesnotexist' does not exist} + end +end + +class SQLServerRakePurgeTest < SQLServerRakeTest + before do + quietly { db_tasks.create(configuration) } + connection.create_table :users, force: true do |t| + t.string :name, :email + t.timestamps null: false + end + end + + it "clears active connections, drops database, and recreates with established connection" do + _(connection.current_database).must_equal(new_database) + _(connection.tables).must_include "users" + quietly { db_tasks.purge(configuration) } + _(connection.current_database).must_equal(new_database) + _(connection.tables).wont_include "users" + end +end + +class SQLServerRakeCharsetTest < SQLServerRakeTest + before do + quietly { db_tasks.create(configuration) } + end + + it "retrieves charset" do + _(db_tasks.charset(configuration)).must_equal "iso_1" + end +end + +class SQLServerRakeCollationTest < SQLServerRakeTest + before do + quietly { db_tasks.create(configuration) } + end + + it "retrieves collation" do + _(db_tasks.collation(configuration)).must_equal "SQL_Latin1_General_CP1_CI_AS" + end +end + +class SQLServerRakeStructureDumpLoadTest < SQLServerRakeTest + let(:filename) { File.join ARTest::SQLServer.migrations_root, "structure.sql" } + let(:filedata) { File.read(filename) } + + before do + quietly { db_tasks.create(configuration) } + connection.create_table :users, force: true do |t| + t.string :name, :email + t.text :background1 + t.text_basic :background2 + t.timestamps null: false + end + end + + after do + FileUtils.rm_rf(filename) + end + + it "dumps structure and accounts for defncopy oddities" do + skip "debug defncopy on windows later" if host_windows? 
+ + quietly { db_tasks.structure_dump configuration, filename } + + _(filedata).wont_match %r{\AUSE.*\z} + _(filedata).wont_match %r{\AGO.*\z} + _(filedata).must_match %r{\[email\]\s+nvarchar\(4000\)} + _(filedata).must_match %r{\[background1\]\s+nvarchar\(max\)} + _(filedata).must_match %r{\[background2\]\s+text\s+} + end + + it "can load dumped structure" do + skip "debug defncopy on windows later" if host_windows? + + quietly { db_tasks.structure_dump configuration, filename } + + _(filedata).must_match %r{CREATE TABLE \[dbo\]\.\[users\]} + db_tasks.purge(configuration) + _(connection.tables).wont_include "users" + db_tasks.load_schema db_config, :sql, filename + _(connection.tables).must_include "users" + end +end + +class SQLServerRakeSchemaCacheDumpLoadTest < SQLServerRakeTest + let(:filename) { File.join ARTest::SQLServer.test_root_sqlserver, "schema_cache.yml" } + let(:filedata) { File.read(filename) } + + before do + quietly { db_tasks.create(configuration) } + + connection.create_table :users, force: true do |t| + t.string :name, null: false + end + end + + after do + FileUtils.rm_rf(filename) + end + + it "dumps schema cache with SQL Server metadata" do + quietly { db_tasks.dump_schema_cache connection, filename } + + filedata = File.read(filename) + _schema_cache = YAML.respond_to?(:unsafe_load) ? YAML.unsafe_load(filedata) : YAML.load(filedata) + + col_id, col_name = connection.schema_cache.columns("users") + + assert col_id.is_identity + assert col_id.is_primary + assert_equal col_id.ordinal_position, 1 + assert_equal col_id.table_name, "users" + + assert_not col_name.is_identity + assert_not col_name.is_primary + assert_equal col_name.ordinal_position, 2 + assert_equal col_name.table_name, "users" + end +end diff --git a/test/cases/resolver_test_sqlserver.rb b/test/cases/resolver_test_sqlserver.rb deleted file mode 100644 index 8437394cd..000000000 --- a/test/cases/resolver_test_sqlserver.rb +++ /dev/null @@ -1,48 +0,0 @@ -require "cases/helper" - -module ActiveRecord - class Base - class ConnectionSpecification - - class ResolverTest < ActiveRecord::TestCase - - include SqlserverCoercedTest - - COERCED_TESTS = [ - :test_url_host_no_db, - :test_url_host_db, - :test_url_port - ] - - def test_coerced_test_url_host_no_db - spec = resolve 'sqlserver://foo?encoding=utf8' - assert_equal({ - :adapter => "sqlserver", - :database => "", - :host => "foo", - :encoding => "utf8" }, spec) - end - - def test_coerced_test_url_host_db - spec = resolve 'sqlserver://foo/bar?encoding=utf8' - assert_equal({ - :adapter => "sqlserver", - :database => "bar", - :host => "foo", - :encoding => "utf8" }, spec) - end - - def test_coerced_test_url_port - spec = resolve 'sqlserver://foo:123?encoding=utf8' - assert_equal({ - :adapter => "sqlserver", - :database => "", - :port => 123, - :host => "foo", - :encoding => "utf8" }, spec) - end - end - - end - end -end diff --git a/test/cases/schema_dumper_test_sqlserver.rb b/test/cases/schema_dumper_test_sqlserver.rb index 0db3f9ba5..39f4e0dc1 100644 --- a/test/cases/schema_dumper_test_sqlserver.rb +++ b/test/cases/schema_dumper_test_sqlserver.rb @@ -1,96 +1,292 @@ -require 'cases/sqlserver_helper' -require 'stringio' - -class SchemaDumperTestSqlserver < ActiveRecord::TestCase - - setup :find_all_tables - - context 'For primary keys' do - - should 'honor nonstandards' do - table_dump('movies') do |output| - match = output.match(%r{create_table "movies"(.*)do}) - assert_not_nil(match, "nonstandardpk table not found") - assert_match %r(:primary_key => "movieid"), 
match[1], "non-standard primary key not preserved" - end - end - +# frozen_string_literal: true + +require "cases/helper_sqlserver" +require "stringio" + +class SchemaDumperTestSQLServer < ActiveRecord::TestCase + before { all_tables } + + let(:all_tables) { ActiveRecord::Base.lease_connection.tables } + let(:schema) { @generated_schema } + + it "sst_datatypes" do + generate_schema_for_table "sst_datatypes" + + assert_line :bigint, type: "bigint", default: 42 + assert_line :int, type: "integer", default: 42 + assert_line :smallint, type: "integer", limit: 2, default: 42 + assert_line :tinyint, type: "integer", limit: 1, default: 42 + assert_line :bit, type: "boolean", default: true + assert_line :decimal_9_2, type: "decimal", precision: 9, scale: 2, default: 12345.01 + assert_line :numeric_18_0, type: "decimal", precision: 18, default: 191 + assert_line :numeric_36_2, type: "decimal", precision: 36, scale: 2, default: 12345678901234567890.01 + assert_line :money, type: "money", precision: 19, scale: 4, default: 4.2 + assert_line :smallmoney, type: "smallmoney", precision: 10, scale: 4, default: 4.2 + # Approximate Numerics + assert_line :float, type: "float", default: 123.00000001 + assert_line :real, type: "real", default: 123.45 + # Date and Time + assert_line :date, type: "date", default: "01-01-0001" + assert_line :datetime, type: "datetime", precision: nil, default: "01-01-1753 00:00:00.123" + assert_line :datetime2_7, type: "datetime", precision: 7, default: "12-31-9999 23:59:59.9999999" + assert_line :datetime2_3, type: "datetime", precision: 3 + assert_line :datetime2_1, type: "datetime", precision: 1 + assert_line :smalldatetime, type: "smalldatetime", default: "01-01-1901 15:45:00.0" + assert_line :time_7, type: "time", precision: 7, default: "04:20:00.2883215" + assert_line :time_2, type: "time", precision: 2 + assert_line :time_default, type: "time", precision: 7, default: "15:03:42.0621978" + # Character Strings + assert_line :char_10, type: "char", limit: 10, default: "1234567890" + assert_line :varchar_50, type: "varchar", limit: 50, default: "test varchar_50" + assert_line :varchar_max, type: "varchar_max", default: "test varchar_max" + assert_line :text, type: "text_basic", default: "test text" + # Unicode Character Strings + assert_line :nchar_10, type: "nchar", limit: 10, default: "12345678åå" + assert_line :nvarchar_50, type: "string", limit: 50, default: "test nvarchar_50 åå" + assert_line :nvarchar_max, type: "text", default: "test nvarchar_max åå" + assert_line :ntext, type: "ntext", default: "test ntext åå" + # Binary Strings + assert_line :binary_49, type: "binary_basic", limit: 49 + assert_line :varbinary_49, type: "varbinary", limit: 49 + assert_line :varbinary_max, type: "binary" + # Other Data Types + assert_line :uniqueidentifier, type: "uuid", default: -> { "newid()" } + assert_line :timestamp, type: "ss_timestamp" end - - context 'For integers' do - - should 'include limit constraint that match logic for smallint and bigint in #extract_limit' do - table_dump('integer_limits') do |output| - assert_match %r{c_int_1.*:limit => 2}, output - assert_match %r{c_int_2.*:limit => 2}, output - assert_match %r{c_int_3.*}, output - assert_match %r{c_int_4.*}, output - assert_no_match %r{c_int_3.*:limit}, output - assert_no_match %r{c_int_4.*:limit}, output - assert_match %r{c_int_5.*:limit => 8}, output - assert_match %r{c_int_6.*:limit => 8}, output - assert_match %r{c_int_7.*:limit => 8}, output - assert_match %r{c_int_8.*:limit => 8}, output - end - end - + + it 
"sst_datatypes_migration" do + columns = SSTestDatatypeMigration.columns_hash + generate_schema_for_table "sst_datatypes_migration" + + # Simple Rails conventions + _(columns["integer_col"].sql_type).must_equal "int(4)" + _(columns["bigint_col"].sql_type).must_equal "bigint(8)" + _(columns["boolean_col"].sql_type).must_equal "bit" + _(columns["decimal_col"].sql_type).must_equal "decimal(18,0)" + _(columns["float_col"].sql_type).must_equal "float" + _(columns["string_col"].sql_type).must_equal "nvarchar(4000)" + _(columns["text_col"].sql_type).must_equal "nvarchar(max)" + _(columns["datetime_nil_precision_col"].sql_type).must_equal "datetime" + _(columns["datetime_col"].sql_type).must_equal "datetime2(6)" + _(columns["timestamp_col"].sql_type).must_equal "datetime2(6)" + _(columns["time_col"].sql_type).must_equal "time(7)" + _(columns["date_col"].sql_type).must_equal "date" + _(columns["binary_col"].sql_type).must_equal "varbinary(max)" + + assert_line :integer_col, type: "integer" + assert_line :bigint_col, type: "bigint" + assert_line :boolean_col, type: "boolean" + assert_line :decimal_col, type: "decimal", precision: 18 + assert_line :float_col, type: "float" + assert_line :string_col, type: "string" + assert_line :text_col, type: "text" + assert_line :datetime_nil_precision_col, type: "datetime", precision: nil + assert_line :datetime_col, type: "datetime" + assert_line :datetime_col, type: "datetime" + assert_line :timestamp_col, type: "datetime" + assert_line :time_col, type: "time", precision: 7 + assert_line :date_col, type: "date" + assert_line :binary_col, type: "binary" + + # Our type methods. + _(columns["real_col"].sql_type).must_equal "real" + _(columns["money_col"].sql_type).must_equal "money" + _(columns["smalldatetime_col"].sql_type).must_equal "smalldatetime" + _(columns["datetime2_col"].sql_type).must_equal "datetime2(7)" + _(columns["datetimeoffset"].sql_type).must_equal "datetimeoffset(7)" + _(columns["smallmoney_col"].sql_type).must_equal "smallmoney" + _(columns["char_col"].sql_type).must_equal "char(1)" + _(columns["varchar_col"].sql_type).must_equal "varchar(8000)" + _(columns["text_basic_col"].sql_type).must_equal "text" + _(columns["nchar_col"].sql_type).must_equal "nchar(1)" + _(columns["ntext_col"].sql_type).must_equal "ntext" + _(columns["binary_basic_col"].sql_type).must_equal "binary(1)" + _(columns["binary_basic_16_col"].sql_type).must_equal "binary(16)" + _(columns["varbinary_col"].sql_type).must_equal "varbinary(8000)" + _(columns["uuid_col"].sql_type).must_equal "uniqueidentifier" + _(columns["sstimestamp_col"].sql_type).must_equal "timestamp" + _(columns["json_col"].sql_type).must_equal "nvarchar(max)" + + assert_line :real_col, type: "real" + assert_line :money_col, type: "money", precision: 19, scale: 4 + assert_line :smalldatetime_col, type: "smalldatetime" + assert_line :datetime2_col, type: "datetime", precision: 7 + assert_line :datetimeoffset, type: "datetimeoffset", precision: 7 + assert_line :smallmoney_col, type: "smallmoney", precision: 10, scale: 4 + assert_line :char_col, type: "char", limit: 1 + assert_line :varchar_col, type: "varchar" + assert_line :text_basic_col, type: "text_basic" + assert_line :nchar_col, type: "nchar", limit: 1 + assert_line :ntext_col, type: "ntext" + assert_line :binary_basic_col, type: "binary_basic", limit: 1 + assert_line :binary_basic_16_col, type: "binary_basic", limit: 16 + assert_line :varbinary_col, type: "varbinary" + assert_line :uuid_col, type: "uuid" + assert_line :sstimestamp_col, type: 
"ss_timestamp", null: false + assert_line :json_col, type: "text" end - - context 'For strings' do - should 'have varchar(max) dumped as text' do - table_dump('sql_server_strings') do |output| - assert_match %r{t.text.*varchar_max}, output - end - end + it "dump column collation" do + generate_schema_for_table("sst_string_collation") + assert_line :string_without_collation, type: "string" + assert_line :string_default_collation, type: "varchar" + assert_line :string_with_collation, type: "varchar", collation: "SQL_Latin1_General_CP1_CS_AS" + assert_line :varchar_with_collation, type: "varchar", collation: "SQL_Latin1_General_CP1_CS_AS" end - - - private - - def find_all_tables - @all_tables ||= ActiveRecord::Base.connection.tables + + # Special Cases + + it "honor nonstandard primary keys" do + generate_schema_for_table("movies") do |output| + match = output.match(%r{create_table "movies"(.*)do}) + assert_not_nil(match, "non-standard primary key table not found") + assert_match %r{primary_key: "movieid"}, match[1], "non-standard primary key not preserved" + end end - - def standard_dump(ignore_tables = []) - stream = StringIO.new - ActiveRecord::SchemaDumper.ignore_tables = [*ignore_tables] - ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection, stream) - stream.string + + it "no id with model driven primary key" do + output = generate_schema_for_table "sst_no_pk_data" + + _(output).must_match %r{create_table "sst_no_pk_data".*id:\sfalse.*do} + assert_line :name, type: "string" end - - def table_dump(*table_names) - stream = StringIO.new - ActiveRecord::SchemaDumper.ignore_tables = @all_tables-table_names - ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection, stream) - yield stream.string - stream.string + + it "dumps field with unique key constraints only once" do + output = generate_schema_for_table "unique_key_dumped_table" + + _(output.scan('t.integer "unique_field"').length).must_equal(1) end - -end + it "schemas are dumped and tables names only include non-default schema" do + stream = StringIO.new + ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection_pool, stream) + generated_schema = stream.string + + # Only generate non-default schemas. Default schema is 'dbo'. + assert_not_includes generated_schema, 'create_schema "dbo"' + assert_not_includes generated_schema, 'create_schema "db_owner"' + assert_not_includes generated_schema, 'create_schema "INFORMATION_SCHEMA"' + assert_not_includes generated_schema, 'create_schema "sys"' + assert_not_includes generated_schema, 'create_schema "guest"' + assert_includes generated_schema, 'create_schema "test"' + assert_includes generated_schema, 'create_schema "test2"' -class SchemaDumperTest < ActiveRecord::TestCase - - COERCED_TESTS = [:test_schema_dump_keeps_large_precision_integer_columns_as_decimal] - - include SqlserverCoercedTest - - def test_coerced_schema_dump_keeps_large_precision_integer_columns_as_decimal - output = standard_dump - assert_match %r{t.decimal\s+"atoms_in_universe",\s+:precision => 38,\s+:scale => 0}, output + # Only non-default schemas should be included in table names. Default schema is 'dbo'. 
+ assert_includes generated_schema, 'create_table "accounts"' + assert_includes generated_schema, 'create_table "test.aliens"' + assert_includes generated_schema, 'create_table "test2.sst_schema_test_multiple_schema"' end - + private - - def standard_dump + + def generate_schema_for_table(*table_names) + previous_ignore_tables = ActiveRecord::SchemaDumper.ignore_tables + ActiveRecord::SchemaDumper.ignore_tables = all_tables - table_names + stream = StringIO.new - ActiveRecord::SchemaDumper.ignore_tables = [] - ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection, stream) - stream.string + ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection_pool, stream) + + @generated_schema = stream.string + yield @generated_schema if block_given? + @schema_lines = {} + type_matcher = /\A\s+t\.\w+\s+"(.*?)"[,\n]/ + @generated_schema.each_line do |line| + next unless line =~ type_matcher + + @schema_lines[Regexp.last_match[1]] = SchemaLine.new(line) + end + @generated_schema + ensure + ActiveRecord::SchemaDumper.ignore_tables = previous_ignore_tables end - -end + def line(column_name) + @schema_lines[column_name.to_s] + end + + def assert_line(column_name, expected_options = {}) + line = line(column_name) + assert line, "Could not find line with column name: #{column_name.inspect} in schema:\n#{schema}" + + # Check that the expected and actual option keys. + expected_options_keys = expected_options.keys + expected_options_keys.delete(:type) + _(expected_options_keys.sort).must_equal line.options.keys.sort, "For column '#{column_name}' expected schema options and actual schema options do not match." + + # Check the expected and actual option values. + expected_options.each do |key, expected| + actual = (key == :type) ? line.send(:type_method) : line.send(key) + + message = "#{key.to_s.titleize} of #{expected.inspect} not found in:\n#{line}" + + if expected.nil? + _(actual).must_be_nil message + elsif expected.is_a?(Array) + _(actual).must_include expected, message + elsif expected.is_a?(Float) + _(actual).must_be_close_to expected, 0.001 + elsif expected.is_a?(Proc) + _(actual.call).must_equal(expected.call) + else + _(actual).must_equal expected, message + end + end + end + + class SchemaLine + LINE_PARSER = %r{t\.(\w+)\s+"(.*?)"[,\s+](.*)} + + attr_reader :line, + :type_method, + :col_name, + :options + + def self.option(method_name) + define_method(method_name) do + if options.key?(method_name.to_sym) + options[method_name.to_sym] + else + throw "Schema line does include the '#{method_name}' option!" + end + end + end + + def initialize(line) + @line = line + @type_method, @col_name, @options = parse_line + end + + option :limit + option :precision + option :scale + option :default + option :collation + option :null + + def to_s + line.squish + end + + def inspect + "#" + end + + private + + def parse_line + _all, type_method, col_name, options = @line.match(LINE_PARSER).to_a + options = parse_options(options) + + [type_method, col_name, options] + end + def parse_options(opts) + if opts.present? 
+ eval("{#{opts}}", binding, __FILE__, __LINE__) # standard:disable Security/Eval + else + {} + end + end + end +end diff --git a/test/cases/schema_test_sqlserver.rb b/test/cases/schema_test_sqlserver.rb index 1f1b0c5b2..fc645cf7f 100644 --- a/test/cases/schema_test_sqlserver.rb +++ b/test/cases/schema_test_sqlserver.rb @@ -1,71 +1,133 @@ -require 'cases/sqlserver_helper' +# frozen_string_literal: true -class SchemaTestSqlserver < ActiveRecord::TestCase - - def setup - @connection = ActiveRecord::Base.connection - end - - def read_schema_name(table_name) - ActiveRecord::ConnectionAdapters::Sqlserver::Utils.unqualify_table_schema(table_name) - end - - context 'When table is dbo schema' do - - should 'find primary key for tables with odd schema' do - assert_equal 'legacy_id', @connection.primary_key('natural_pk_data') - assert SqlServerNaturalPkData.columns_hash['legacy_id'].primary +require "cases/helper_sqlserver" + +class SchemaTestSQLServer < ActiveRecord::TestCase + describe "When table is dbo schema" do + it "find primary key for tables with odd schema" do + _(connection.primary_key("sst_natural_pk_data")).must_equal "legacy_id" end - end - - context 'When table is in non-dbo schema' do - - should 'work with #table_exists?' do - assert @connection.table_exists?('test.sql_server_schema_natural_id') + + describe "When table is in non-dbo schema" do + it "work with table exists" do + assert connection.data_source_exists?("test.sst_schema_natural_id") + assert connection.data_source_exists?("[test].[sst_schema_natural_id]") + end + + it "find primary key for tables with odd schema" do + _(connection.primary_key("test.sst_schema_natural_id")).must_equal "legacy_id" end - - should 'find primary key for tables with odd schema' do - assert_equal 'legacy_id', @connection.primary_key('test.sql_server_schema_natural_id') - assert SqlServerNaturalPkDataSchema.columns_hash['legacy_id'].primary + + it "have only one identity column" do + columns = connection.columns("test.sst_schema_identity") + + assert_equal 2, columns.size + assert_equal 1, columns.count { |c| c.is_identity? 
} end - - should "have only one identity column" do - columns = @connection.columns("test.sql_server_schema_identity") - assert_equal 2, columns.size - assert_equal 1, columns.select{ |c| c.primary }.size - end - - should "read only column properties for table in specific schema" do - test_columns = @connection.columns("test.sql_server_schema_columns") - dbo_columns = @connection.columns("dbo.sql_server_schema_columns") - columns = @connection.columns("sql_server_schema_columns") # This returns table from dbo schema - assert_equal 7, test_columns.size + + it "read only column properties for table in specific schema" do + test_columns = connection.columns("test.sst_schema_columns") + dbo_columns = connection.columns("dbo.sst_schema_columns") + columns = connection.columns("sst_schema_columns") # This returns table from dbo schema + + assert_equal 7, test_columns.size assert_equal 2, dbo_columns.size assert_equal 2, columns.size - assert_equal 1, test_columns.select{ |c| c.primary }.size - assert_equal 1, dbo_columns.select{ |c| c.primary }.size - assert_equal 1, columns.select{ |c| c.primary }.size - end - - should "return schema name in all cases" do - assert_nil read_schema_name("table") - assert_equal "schema1", read_schema_name("schema1.table") - assert_equal "schema2", read_schema_name("database.schema2.table") - assert_equal "schema3", read_schema_name("server.database.schema3.table") - assert_equal "schema3", read_schema_name("[server].[database].[schema3].[table]") - end - - should "return correct varchar and nvarchar column limit (length) when table is in non dbo schema" do - columns = @connection.columns("test.sql_server_schema_columns") - assert_equal 255, columns.find{|c| c.name == 'name'}.limit - assert_equal 1000, columns.find{|c| c.name == 'description'}.limit - assert_equal 255, columns.find{|c| c.name == 'n_name'}.limit - assert_equal 1000, columns.find{|c| c.name == 'n_description'}.limit + assert_equal 1, test_columns.count { |c| c.is_identity? } + assert_equal 1, dbo_columns.count { |c| c.is_identity? } + assert_equal 1, columns.count { |c| c.is_identity? 
} + end + + it "return correct varchar and nvarchar column limit length when table is in non-dbo schema" do + columns = connection.columns("test.sst_schema_columns") + + assert_equal 255, columns.find { |c| c.name == "name" }.limit + assert_equal 1000, columns.find { |c| c.name == "description" }.limit + assert_equal 255, columns.find { |c| c.name == "n_name" }.limit + assert_equal 1000, columns.find { |c| c.name == "n_description" }.limit end - end - - -end + describe "parsing table name from raw SQL" do + describe "SELECT statements" do + it do + assert_equal "[sst_schema_columns]", connection.send(:get_raw_table_name, "SELECT [sst_schema_columns].[id] FROM [sst_schema_columns]") + end + + it do + assert_equal "sst_schema_columns", connection.send(:get_raw_table_name, "SELECT [sst_schema_columns].[id] FROM sst_schema_columns") + end + + it do + assert_equal "[WITH - SPACES]", connection.send(:get_raw_table_name, "SELECT id FROM [WITH - SPACES]") + end + + it do + assert_equal "[WITH - SPACES$DOLLAR]", connection.send(:get_raw_table_name, "SELECT id FROM [WITH - SPACES$DOLLAR]") + end + + it do + assert_nil connection.send(:get_raw_table_name, nil) + end + end + + describe "INSERT statements" do + it do + assert_equal "[dashboards]", connection.send(:get_raw_table_name, "INSERT INTO [dashboards] DEFAULT VALUES; SELECT CAST(SCOPE_IDENTITY() AS bigint) AS Ident") + end + + it do + assert_equal "lock_without_defaults", connection.send(:get_raw_table_name, "INSERT INTO lock_without_defaults(title) VALUES('title1')") + end + + it do + assert_equal "json_data_type", connection.send(:get_raw_table_name, "insert into json_data_type (payload) VALUES ('null')") + end + + it do + assert_equal "[auto_increments]", connection.send(:get_raw_table_name, "INSERT INTO [auto_increments] OUTPUT INSERTED.[id] DEFAULT VALUES") + end + + it do + assert_equal "[WITH - SPACES]", connection.send(:get_raw_table_name, "EXEC sp_executesql N'INSERT INTO [WITH - SPACES] ([external_id]) OUTPUT INSERTED.[id] VALUES (@0)', N'@0 bigint', @0 = 10") + end + + it do + assert_equal "[test].[aliens]", connection.send(:get_raw_table_name, "EXEC sp_executesql N'INSERT INTO [test].[aliens] ([name]) OUTPUT INSERTED.[id] VALUES (@0)', N'@0 varchar(255)', @0 = 'Trisolarans'") + end + + it do + assert_equal "[with].[select notation]", connection.send(:get_raw_table_name, "INSERT INTO [with].[select notation] SELECT * FROM [table_name]") + end + end + + describe "MERGE statements" do + it do + assert_equal "[dashboards]", connection.send(:get_raw_table_name, "MERGE INTO [dashboards] AS target") + end + + it do + assert_equal "lock_without_defaults", connection.send(:get_raw_table_name, "MERGE INTO lock_without_defaults AS target") + end + + it do + assert_equal "[WITH - SPACES]", connection.send(:get_raw_table_name, "MERGE INTO [WITH - SPACES] AS target") + end + + it do + assert_equal "[with].[select notation]", connection.send(:get_raw_table_name, "MERGE INTO [with].[select notation] AS target") + end + + it do + assert_equal "[with_numbers_1234]", connection.send(:get_raw_table_name, "MERGE INTO [with_numbers_1234] AS target") + end + end + + describe "CREATE VIEW statements" do + it do + assert_equal "test_table_as", connection.send(:get_raw_table_name, "CREATE VIEW test_views ( test_table_a_id, test_table_b_id ) AS SELECT test_table_as.id as test_table_a_id, test_table_bs.id as test_table_b_id FROM (test_table_as with(nolock) LEFT JOIN test_table_bs with(nolock) ON (test_table_as.id = test_table_bs.test_table_a_id))") + end + end 
+ end +end diff --git a/test/cases/scratch_test_sqlserver.rb b/test/cases/scratch_test_sqlserver.rb deleted file mode 100644 index 8a8b1d9d8..000000000 --- a/test/cases/scratch_test_sqlserver.rb +++ /dev/null @@ -1,11 +0,0 @@ -require 'cases/sqlserver_helper' - -class ScratchTestSqlserver < ActiveRecord::TestCase - - should 'pass' do - assert true - end - - -end - diff --git a/test/cases/session_test_sqlserver.rb b/test/cases/session_test_sqlserver.rb deleted file mode 100644 index e19c96c98..000000000 --- a/test/cases/session_test_sqlserver.rb +++ /dev/null @@ -1,19 +0,0 @@ -require 'cases/sqlserver_helper' -require 'action_dispatch' -require 'active_record/session_store' - -module ActiveRecord - class SessionStore - class SessionTest < ActiveRecord::TestCase - - setup :reset_column_information_for_each_test - - protected - - def reset_column_information_for_each_test - Session.reset_column_information - end - - end - end -end diff --git a/test/cases/showplan_test_sqlserver.rb b/test/cases/showplan_test_sqlserver.rb index 6c5a22ac5..6dfde5386 100644 --- a/test/cases/showplan_test_sqlserver.rb +++ b/test/cases/showplan_test_sqlserver.rb @@ -1,77 +1,65 @@ -require 'cases/sqlserver_helper' -require 'models/car' +# frozen_string_literal: true -class ShowplanTestSqlserver < ActiveRecord::TestCase - +require "cases/helper_sqlserver" +require "models/car" + +class ShowplanTestSQLServer < ActiveRecord::TestCase fixtures :cars - - context 'Unprepare previously prepared SQL' do - - should 'from simple statement' do - plan = Car.where(:id => 1).explain - assert plan.starts_with?("EXPLAIN for: SELECT [cars].* FROM [cars] WHERE [cars].[id] = 1") - assert plan.include?("Clustered Index Seek"), 'make sure we do not showplan the sp_executesql' + + describe "Unprepare previously prepared SQL" do + it "from simple statement" do + plan = Car.where(id: 1).explain.inspect + _(plan).must_include "SELECT [cars].* FROM [cars] WHERE [cars].[id] = 1" + _(plan).must_include "Clustered Index Seek", "make sure we do not showplan the sp_executesql" end - should 'from multiline statement' do - plan = Car.where("\n id = 1 \n").explain - assert plan.starts_with?("EXPLAIN for: SELECT [cars].* FROM [cars] WHERE (\n id = 1 \n)") - assert plan.include?("Clustered Index Seek"), 'make sure we do not showplan the sp_executesql' + it "from multiline statement" do + plan = Car.where("\n id = 1 \n").explain.inspect + _(plan).must_include "SELECT [cars].* FROM [cars] WHERE (\n id = 1 \n)" + _(plan).must_include "Clustered Index Seek", "make sure we do not showplan the sp_executesql" end - - should 'from prepared statement' do - plan = capture_logger do - with_threshold(0) { Car.find(1) } - end - assert plan.include?('EXPLAIN for: SELECT TOP (1) [cars].* FROM [cars] WHERE [cars].[id] = @0 [["id", 1]]') - assert plan.include?("Clustered Index Seek"), 'make sure we do not showplan the sp_executesql' + + it "from prepared statement" do + plan = Car.where(name: ",").limit(1).explain.inspect + _(plan).must_include "SELECT [cars].* FROM [cars] WHERE [cars].[name]" + _(plan).must_include "TOP EXPRESSION", "make sure we do not showplan the sp_executesql" + _(plan).must_include "Clustered Index Scan", "make sure we do not showplan the sp_executesql" end - - should 'from prepared statement ...' 
do - plan = capture_logger do - with_threshold(0) { Car.where(:name => ',').first } - end - assert plan.include?("SELECT TOP (1) [cars].* FROM [cars] WHERE [cars].[name] = N','") - assert plan.include?("TOP EXPRESSION"), 'make sure we do not showplan the sp_executesql' - assert plan.include?("Clustered Index Scan"), 'make sure we do not showplan the sp_executesql' + + it "from array condition using index" do + plan = Car.where(id: [1, 2]).explain.inspect + _(plan).must_include "SELECT [cars].* FROM [cars] WHERE [cars].[id] IN (@0, @1)" + _(plan).must_include "Clustered Index Seek", "make sure we do not showplan the sp_executesql" + end + + it "from array condition" do + plan = Car.where(name: ["honda", "zyke"]).explain.inspect + _(plan).must_include " SELECT [cars].* FROM [cars] WHERE [cars].[name] IN (@0, @1)" + _(plan).must_include "Clustered Index Scan", "make sure we do not showplan the sp_executesql" end - end - - context 'With SHOWPLAN_TEXT option' do - - should 'use simple table printer' do - with_showplan_option('SHOWPLAN_TEXT') do - plan = Car.where(:id => 1).explain - assert plan.starts_with?("EXPLAIN for: SELECT [cars].* FROM [cars] WHERE [cars].[id] = 1") - assert plan.include?("Clustered Index Seek"), 'make sure we do not showplan the sp_executesql' + + describe "With SHOWPLAN_TEXT option" do + it "use simple table printer" do + with_showplan_option("SHOWPLAN_TEXT") do + plan = Car.where(id: 1).explain.inspect + _(plan).must_include "SELECT [cars].* FROM [cars] WHERE [cars].[id]" + _(plan).must_include "Clustered Index Seek", "make sure we do not showplan the sp_executesql" end end - end - - context 'With SHOWPLAN_XML option' do - - should 'show formatted xml' do - with_showplan_option('SHOWPLAN_XML') do - plan = Car.where(:id => 1).explain - assert plan.include?('ShowPlanXML') + + describe "With SHOWPLAN_XML option" do + it "show formatted xml" do + with_showplan_option("SHOWPLAN_XML") do + plan = Car.where(id: 1).explain.inspect + _(plan).must_include "ShowPlanXML" end end - end - - + private - - def base - ActiveRecord::Base - end - def connection - base.connection - end - def with_showplan_option(option) old_option = ActiveRecord::ConnectionAdapters::SQLServerAdapter.showplan_option ActiveRecord::ConnectionAdapters::SQLServerAdapter.showplan_option = option @@ -79,36 +67,4 @@ def with_showplan_option(option) ensure ActiveRecord::ConnectionAdapters::SQLServerAdapter.showplan_option = old_option end - - def with_threshold(threshold) - current_threshold = base.auto_explain_threshold_in_seconds - base.auto_explain_threshold_in_seconds = threshold - yield - ensure - base.auto_explain_threshold_in_seconds = current_threshold - end - - def capture_logger - original_logger = base.logger - log = StringIO.new - base.logger = Logger.new(log) - base.logger.level = Logger::WARN - yield - log.string - ensure - base.logger = original_logger - end - - def capture_queries - base.auto_explain_threshold_in_seconds = nil - queries = Thread.current[:available_queries_for_explain] = [] - with_threshold(0) do - yield - end - queries - ensure - Thread.current[:available_queries_for_explain] = nil - end - - end diff --git a/test/cases/specific_schema_test_sqlserver.rb b/test/cases/specific_schema_test_sqlserver.rb index c3537af8b..0829012e5 100644 --- a/test/cases/specific_schema_test_sqlserver.rb +++ b/test/cases/specific_schema_test_sqlserver.rb @@ -1,211 +1,177 @@ -require 'cases/sqlserver_helper' - -class SpecificSchemaTestSqlserver < ActiveRecord::TestCase - - should 'be able to complex count 
tables with no primary key' do - NoPkData.delete_all - 10.times { |n| NoPkData.create! :name => "Test#{n}" } - assert_equal 1, NoPkData.where(:name => 'Test5').count - end - - should 'quote table names properly even when they are views' do - obj = SqlServerQuotedTable.create! - assert_nothing_raised { SqlServerQuotedTable.first } - obj = SqlServerQuotedView1.create! - assert_nothing_raised { SqlServerQuotedView1.first } - obj = SqlServerQuotedView2.create! - assert_nothing_raised { SqlServerQuotedView2.first } - end - - should 'cope with multi line defaults' do - default = StringDefault.new - assert_equal "Some long default with a\nnew line.", default.string_with_multiline_default - end - - should 'default strings before save' do - default = StringDefault.new - assert_equal nil, default.string_with_null_default - assert_equal 'null', default.string_with_pretend_null_one - assert_equal '(null)', default.string_with_pretend_null_two - assert_equal 'NULL', default.string_with_pretend_null_three - assert_equal '(NULL)', default.string_with_pretend_null_four - assert_equal '(3)', default.string_with_pretend_paren_three - end - - should 'default strings after save' do - default = StringDefault.create - assert_equal nil, default.string_with_null_default - assert_equal 'null', default.string_with_pretend_null_one - assert_equal '(null)', default.string_with_pretend_null_two - assert_equal 'NULL', default.string_with_pretend_null_three - assert_equal '(NULL)', default.string_with_pretend_null_four - end - - context 'Testing edge case schemas' do - - setup do - @edge_class = SqlServerEdgeSchema +# frozen_string_literal: true + +require "cases/helper_sqlserver" + +class SpecificSchemaTestSQLServer < ActiveRecord::TestCase + after { SSTestEdgeSchema.delete_all } + + it "handle dollar symbols" do + assert_difference("SSTestDollarTableName.count", 1) do + SSTestDollarTableName.create! end - - context 'with tinyint primary key' do - - should 'work with identity inserts and finders' do - record = SqlServerTinyintPk.new :name => '1' - record.id = 1 - record.save! - assert_equal record, SqlServerTinyintPk.find(1) - end - + assert_nothing_raised do + SSTestDollarTableName.limit(20).offset(1) end - - context 'with natural primary keys' do - - should 'work with identity inserts' do - record = SqlServerNaturalPkData.new :name => 'Test', :description => 'Natural identity inserts.' - record.id = '12345ABCDE' - assert record.save - assert_equal '12345ABCDE', record.reload.id - end - - should 'work with identity inserts when the key is an int' do - record = SqlServerNaturalPkIntData.new :name => 'Test', :description => 'Natural identity inserts.' - record.id = 12 - assert record.save - assert_equal 12, record.reload.id - end - - should 'use primary key for row table order in pagination sql' do - sql = /OVER \(ORDER BY \[natural_pk_data\]\.\[legacy_id\] ASC\)/ - assert_sql(sql) { SqlServerNaturalPkData.limit(5).offset(5).all } - end + end - end - - context 'with special quoted column' do - - should 'work as normal' do - @edge_class.delete_all - r = @edge_class.create! 'crazy]]quote' => 'crazyqoute' - assert @edge_class.columns_hash['crazy]]quote'] - assert_equal r, @edge_class.first(:conditions => {'crazy]]quote' => 'crazyqoute'}) - end + it "models can use tinyint pk tables" do + obj = SSTestTinyintPk.create! 
name: "1" + _(["Fixnum", "Integer"]).must_include obj.id.class.name + _(SSTestTinyintPk.find(obj.id)).must_equal obj + end - end + it "be able to complex count tables with no primary key" do + SSTestNoPkData.delete_all + 10.times { |n| SSTestNoPkData.create! name: "Test#{n}" } + assert_equal 1, SSTestNoPkData.where(name: "Test5").count + end - context 'with column names that have spaces' do + it "quote table names properly even when they are views" do + SSTestQuotedTable.create! + assert_nothing_raised { assert SSTestQuotedTable.first } - should 'create record using a custom attribute reader and be able to load it back in' do - value = 'Saved value into a column that has a space in the name.' - record = @edge_class.create! :with_spaces => value - assert_equal value, @edge_class.find(record.id).with_spaces - end - - end - - context 'with description column' do + SSTestQuotedTableUser.create! + assert_nothing_raised { assert SSTestQuotedTableUser.first } - setup do - @da = @edge_class.create! :description => 'A' - @db = @edge_class.create! :description => 'B' - @dc = @edge_class.create! :description => 'C' - end - - teardown { @edge_class.delete_all } - - should 'allow all sorts of ordering without adapter munging it up' do - assert_equal ['A','B','C'], @edge_class.all(:order => 'description').map(&:description) - assert_equal ['A','B','C'], @edge_class.all(:order => 'description asc').map(&:description) - assert_equal ['A','B','C'], @edge_class.all(:order => 'description ASC').map(&:description) - assert_equal ['C','B','A'], @edge_class.all(:order => 'description desc').map(&:description) - assert_equal ['C','B','A'], @edge_class.all(:order => 'description DESC').map(&:description) - end + SSTestQuotedView1.create! + assert_nothing_raised { assert SSTestQuotedView1.first } - end - - context 'with bigint column' do - - setup do - @b5k = 5000 - @bi5k = @edge_class.create! :bigint => @b5k, :description => 'Five Thousand' - @bnum = 9_000_000_000_000_000_000 - @bimjr = @edge_class.create! :bigint => @bnum, :description => 'Close to max bignum' - end + SSTestQuotedView2.create! + assert_nothing_raised { assert SSTestQuotedView2.first } + end - should 'can find by biginit' do - assert_equal @bi5k, @edge_class.find_by_bigint(@b5k) - assert_equal @b5k, @edge_class.find(:first, :select => 'bigint', :conditions => {:bigint => @b5k}).bigint - assert_equal @bimjr, @edge_class.find_by_bigint(@bnum) - assert_equal @bnum, @edge_class.find(:first, :select => 'bigint', :conditions => {:bigint => @bnum}).bigint - end + it "cope with multi line defaults" do + default = SSTestStringDefault.new + assert_equal "Some long default with a\nnew line.", default.string_with_multiline_default + end - end - - context 'with tinyint column' do + it "default strings before save" do + default = SSTestStringDefault.new + assert_nil default.string_with_null_default + assert_equal "null", default.string_with_pretend_null_one + assert_equal "(null)", default.string_with_pretend_null_two + assert_equal "NULL", default.string_with_pretend_null_three + assert_equal "(NULL)", default.string_with_pretend_null_four + assert_equal "(3)", default.string_with_pretend_paren_three + end - setup do - @tiny1 = @edge_class.create! :tinyint => 1 - @tiny255 = @edge_class.create! 
:tinyint => 255 - end + it "default strings after save" do + default = SSTestStringDefault.create + assert_nil default.string_with_null_default + assert_equal "null", default.string_with_pretend_null_one + assert_equal "(null)", default.string_with_pretend_null_two + assert_equal "NULL", default.string_with_pretend_null_three + assert_equal "(NULL)", default.string_with_pretend_null_four + end - should 'not treat tinyint like boolean as mysql does' do - assert_equal 1, @edge_class.find_by_tinyint(1).tinyint - assert_equal 255, @edge_class.find_by_tinyint(255).tinyint - end - - should 'throw an error when going out of our tiny int bounds' do - assert_raise(ActiveRecord::StatementInvalid) { @edge_class.create! :tinyint => 256 } - end - - end - - context 'with uniqueidentifier column' do + it "default objects work" do + obj = SSTestObjectDefault.create! name: "MetaSkills" + _(obj.date).must_be_nil "since this is set on insert" + _(obj.reload.date).must_be_instance_of Date + end - setup do - @newid = ActiveRecord::Base.connection.newid_function - assert_guid @newid - end + it "allows datetime2 as timestamps" do + _(SSTestBooking.columns_hash["created_at"].sql_type).must_equal "datetime2(7)" + _(SSTestBooking.columns_hash["updated_at"].sql_type).must_equal "datetime2(7)" + obj1 = SSTestBooking.new name: "test1" + obj1.save! + _(obj1.created_at).must_be_instance_of Time + _(obj1.updated_at).must_be_instance_of Time + end - should 'allow a simple insert and read of a column without a default function' do - obj = @edge_class.create! :guid => @newid - assert_equal @newid, @edge_class.find(obj.id).guid - end - - should 'record the default function name in the column definition but still show a nil real default, will use one day for insert/update' do - newid_column = @edge_class.columns_hash['guid_newid'] - assert newid_column.default_function.present? - assert_nil newid_column.default - assert_equal 'newid()', newid_column.default_function - newseqid_column = @edge_class.columns_hash['guid_newseqid'] - assert newseqid_column.default_function.present? - assert_nil newseqid_column.default - assert_equal 'newsequentialid()', newseqid_column.default_function - end - - should 'use model callback to set get a new guid' do - obj = @edge_class.new - obj.new_id_setting = true - obj.save! - assert_guid obj.guid_newid - end + # Natural primary keys. - end - - context 'with strange table names' do - - should 'handle dollar symbols' do - SqlServerDollarTableName.new.save - SqlServerDollarTableName.limit(20).offset(1).all + it "work with identity inserts" do + record = SSTestNaturalPkData.new name: "Test", description: "Natural identity inserts." + record.id = "12345ABCDE" + assert record.save + assert_equal "12345ABCDE", record.reload.id + end + + it "work with identity inserts when the key is an int" do + record = SSTestNaturalPkIntData.new name: "Test", description: "Natural identity inserts." + record.id = 12 + assert record.save + assert_equal 12, record.reload.id + end + + it "use primary key for row table order in pagination sql" do + sql = /ORDER BY \[sst_natural_pk_data\]\.\[legacy_id\] ASC OFFSET @0 ROWS FETCH NEXT @1 ROWS ONLY/ + assert_queries_match(sql) { SSTestNaturalPkData.limit(5).offset(5).load } + end + + # Special quoted column + + it "work as normal" do + SSTestEdgeSchema.delete_all + r = SSTestEdgeSchema.create! 
"crazy]]quote" => "crazyqoute" + assert SSTestEdgeSchema.columns_hash["crazy]]quote"] + assert_equal r, SSTestEdgeSchema.where("crazy]]quote" => "crazyqoute").first + end + + it "various methods to bypass national quoted columns for any column, but primarily useful for char/varchar" do + value = Class.new do + def quoted_id + "'T'" end - end - + # Using ActiveRecord's quoted_id feature for objects. + assert_queries_and_values_match(/.*/, ["'T'", 1]) { SSTestDatatypeMigration.where(char_col: value.new).first } + assert_queries_and_values_match(/.*/, ["'T'", 1]) { SSTestDatatypeMigration.where(varchar_col: value.new).first } + # Using our custom char type data. + type = ActiveRecord::Type::SQLServer::Char + data = ActiveRecord::Type::SQLServer::Data + assert_queries_and_values_match(/.*/, ["'T'", 1]) { SSTestDatatypeMigration.where(char_col: data.new("T", type.new)).first } + assert_queries_and_values_match(/.*/, ["'T'", 1]) { SSTestDatatypeMigration.where(varchar_col: data.new("T", type.new)).first } + # Taking care of everything. + assert_queries_and_values_match(/.*/, ["'T'", 1]) { SSTestDatatypeMigration.where(char_col: "T").first } + assert_queries_and_values_match(/.*/, ["'T'", 1]) { SSTestDatatypeMigration.where(varchar_col: "T").first } + end + + it "can update and hence properly quoted non-national char/varchar columns" do + o = SSTestDatatypeMigration.create! + o.varchar_col = "O'Reilly" + o.save! + _(o.reload.varchar_col).must_equal "O'Reilly" + o.varchar_col = nil + o.save! + _(o.reload.varchar_col).must_be_nil + end + + # With column names that have spaces + + it "create record using a custom attribute reader and be able to load it back in" do + value = "Saved value into a column that has a space in the name." + record = SSTestEdgeSchema.create! with_spaces: value + assert_equal value, SSTestEdgeSchema.find(record.id).with_spaces end - - - protected - - def assert_guid(guid) - assert_match %r|\w{8}-\w{4}-\w{4}-\w{4}-\w{12}|, guid + + # With description column + + it "allow all sorts of ordering without adapter munging it up with special description column" do + SSTestEdgeSchema.create! description: "A" + SSTestEdgeSchema.create! description: "B" + SSTestEdgeSchema.create! 
description: "C" + assert_equal ["A", "B", "C"], SSTestEdgeSchema.order("description").map(&:description) + assert_equal ["A", "B", "C"], SSTestEdgeSchema.order("description asc").map(&:description) + assert_equal ["A", "B", "C"], SSTestEdgeSchema.order("description ASC").map(&:description) + assert_equal ["C", "B", "A"], SSTestEdgeSchema.order("description desc").map(&:description) + assert_equal ["C", "B", "A"], SSTestEdgeSchema.order("description DESC").map(&:description) + end + + # For uniqueidentifier model helpers + + it "returns a new id via connection newid_function" do + acceptable_uuid = ActiveRecord::ConnectionAdapters::SQLServer::Type::Uuid::ACCEPTABLE_UUID + db_uuid = ActiveRecord::Base.lease_connection.newid_function + _(db_uuid).must_match(acceptable_uuid) + end + + # with similar table definition in two schemas + + it "returns the correct primary columns" do + connection = ActiveRecord::Base.lease_connection + assert_equal "field_1", connection.columns("test.sst_schema_test_multiple_schema").detect(&:is_primary?).name + assert_equal "field_2", connection.columns("test2.sst_schema_test_multiple_schema").detect(&:is_primary?).name end - end diff --git a/test/cases/sqlserver_helper.rb b/test/cases/sqlserver_helper.rb deleted file mode 100644 index f5e089c08..000000000 --- a/test/cases/sqlserver_helper.rb +++ /dev/null @@ -1,150 +0,0 @@ - -SQLSERVER_TEST_ROOT = File.expand_path(File.join(File.dirname(__FILE__),'..')) -SQLSERVER_ASSETS_ROOT = File.expand_path(File.join(SQLSERVER_TEST_ROOT,'assets')) -SQLSERVER_FIXTURES_ROOT = File.expand_path(File.join(SQLSERVER_TEST_ROOT,'fixtures')) -SQLSERVER_MIGRATIONS_ROOT = File.expand_path(File.join(SQLSERVER_TEST_ROOT,'migrations')) -SQLSERVER_SCHEMA_ROOT = File.expand_path(File.join(SQLSERVER_TEST_ROOT,'schema')) -ACTIVERECORD_TEST_ROOT = File.expand_path(File.join(Gem.loaded_specs['activerecord'].full_gem_path,'test')) -ENV['ARCONFIG'] = File.expand_path(File.join(SQLSERVER_TEST_ROOT,'config.yml')) - -$:.unshift ACTIVERECORD_TEST_ROOT - -require 'rubygems' -require 'bundler' -Bundler.setup -require 'shoulda' -require 'mocha' -require 'active_support/dependencies' -require 'active_record' -require 'active_record/version' -require 'active_record/connection_adapters/abstract_adapter' -require 'cases/helper' -require 'models/topic' - -GC.copy_on_write_friendly = true if GC.respond_to?(:copy_on_write_friendly?) - -ActiveRecord::Migration.verbose = false -ActiveRecord::Base.logger = Logger.new(File.expand_path(File.join(SQLSERVER_TEST_ROOT,'debug.log'))) -ActiveRecord::Base.logger.level = 0 - -# Defining our classes in one place as well as soem core tests that need coercing date/time types. 
- -class UpperTestDefault < ActiveRecord::Base ; self.table_name = 'UPPER_TESTS' ; end -class UpperTestLowered < ActiveRecord::Base ; self.table_name = 'upper_tests' ; end -class TableWithRealColumn < ActiveRecord::Base; end -class FkTestHasFk < ActiveRecord::Base ; end -class FkTestHasPk < ActiveRecord::Base ; end -class NumericData < ActiveRecord::Base ; self.table_name = 'numeric_data' ; end -class FloatData < ActiveRecord::Base ; self.table_name = 'float_data' ; end -class CustomersView < ActiveRecord::Base ; self.table_name = 'customers_view' ; end -class StringDefaultsView < ActiveRecord::Base ; self.table_name = 'string_defaults_view' ; end -class StringDefaultsBigView < ActiveRecord::Base ; self.table_name = 'string_defaults_big_view' ; end -class SqlServerNaturalPkData < ActiveRecord::Base ; self.table_name = 'natural_pk_data' ; self.primary_key = 'legacy_id' ; end -class SqlServerTinyintPk < ActiveRecord::Base ; self.table_name = 'tinyint_pk_table' ; end -class SqlServerNaturalPkIntData < ActiveRecord::Base ; self.table_name = 'natural_pk_int_data' ; end -class SqlServerOrderRowNumber < ActiveRecord::Base ; self.table_name = 'order_row_number' ; end -class SqlServerNaturalPkDataSchema < ActiveRecord::Base ; self.table_name = 'test.sql_server_schema_natural_id' ; end -class SqlServerQuotedTable < ActiveRecord::Base ; self.table_name = 'quoted-table' ; end -class SqlServerQuotedView1 < ActiveRecord::Base ; self.table_name = 'quoted-view1' ; end -class SqlServerQuotedView2 < ActiveRecord::Base ; self.table_name = 'quoted-view2' ; end -class SqlServerUnicode < ActiveRecord::Base ; end -class SqlServerString < ActiveRecord::Base ; end -class NoPkData < ActiveRecord::Base ; self.table_name = 'no_pk_data' ; end -class StringDefault < ActiveRecord::Base; end -class SqlServerEdgeSchema < ActiveRecord::Base - attr_accessor :new_id_setting - before_create :set_new_id - def with_spaces - read_attribute :'with spaces' - end - def with_spaces=(value) - write_attribute :'with spaces', value - end - protected - def set_new_id - self[:guid_newid] ||= connection.newid_function if new_id_setting - end -end -class SqlServerDollarTableName < ActiveRecord::Base - self.table_name = 'my$strange_table' -end -class SqlServerChronic < ActiveRecord::Base - coerce_sqlserver_date :date - coerce_sqlserver_time :time - default_timezone = :utc -end -class Topic < ActiveRecord::Base - coerce_sqlserver_date :last_read - coerce_sqlserver_time :bonus_time -end -class Person < ActiveRecord::Base - coerce_sqlserver_date :favorite_day -end - -# A module that we can include in classes where we want to override an active record test. - -module SqlserverCoercedTest - def self.included(base) - base.extend ClassMethods - end - module ClassMethods - def coerced_tests - self.const_get(:COERCED_TESTS) rescue nil - end - def method_added(method) - if coerced_tests && coerced_tests.include?(method) - undef_method(method) rescue nil - STDOUT.puts("Undefined coerced test: #{self.name}##{method}") - end - end - end -end - - -# Our changes/additions to ActiveRecord test helpers specific for SQL Server. - -module ActiveRecord - class SQLCounter - self.ignored_sql.concat([ - %r|SELECT SCOPE_IDENTITY|, %r{INFORMATION_SCHEMA\.(TABLES|VIEWS|COLUMNS)}, - %r|SELECT @@version|, %r|SELECT @@TRANCOUNT|, %r{(BEGIN|COMMIT|ROLLBACK|SAVE) TRANSACTION} - ]) - end -end - -module ActiveRecord - class TestCase < ActiveSupport::TestCase - class << self - def connection_mode_dblib? 
; ActiveRecord::Base.connection.instance_variable_get(:@connection_options)[:mode] == :dblib ; end - def connection_mode_odbc? ; ActiveRecord::Base.connection.instance_variable_get(:@connection_options)[:mode] == :odbc ; end - def sqlserver_2005? ; ActiveRecord::Base.connection.sqlserver_2005? ; end - def sqlserver_2008? ; ActiveRecord::Base.connection.sqlserver_2008? ; end - def sqlserver_azure? ; ActiveRecord::Base.connection.sqlserver_azure? ; end - def ruby_19? ; RUBY_VERSION >= '1.9' ; end - end - def connection_mode_dblib? ; self.class.connection_mode_dblib? ; end - def connection_mode_odbc? ; self.class.connection_mode_odbc? ; end - def sqlserver_2005? ; self.class.sqlserver_2005? ; end - def sqlserver_2008? ; self.class.sqlserver_2008? ; end - def sqlserver_azure? ; self.class.sqlserver_azure? ; end - def ruby_19? ; self.class.ruby_19? ; end - def with_enable_default_unicode_types? - ActiveRecord::ConnectionAdapters::SQLServerAdapter.enable_default_unicode_types.is_a?(TrueClass) - end - def with_enable_default_unicode_types(setting) - old_setting = ActiveRecord::ConnectionAdapters::SQLServerAdapter.enable_default_unicode_types - old_text = ActiveRecord::ConnectionAdapters::SQLServerAdapter.native_text_database_type - old_string = ActiveRecord::ConnectionAdapters::SQLServerAdapter.native_string_database_type - ActiveRecord::ConnectionAdapters::SQLServerAdapter.enable_default_unicode_types = setting - ActiveRecord::ConnectionAdapters::SQLServerAdapter.native_text_database_type = nil - ActiveRecord::ConnectionAdapters::SQLServerAdapter.native_string_database_type = nil - yield - ensure - ActiveRecord::ConnectionAdapters::SQLServerAdapter.enable_default_unicode_types = old_setting - ActiveRecord::ConnectionAdapters::SQLServerAdapter.native_text_database_type = old_text - ActiveRecord::ConnectionAdapters::SQLServerAdapter.native_string_database_type = old_string - end - end -end - - diff --git a/test/cases/table_name_test_sqlserver.rb b/test/cases/table_name_test_sqlserver.rb deleted file mode 100644 index e8b33a1f5..000000000 --- a/test/cases/table_name_test_sqlserver.rb +++ /dev/null @@ -1,38 +0,0 @@ -require 'cases/sqlserver_helper' -require 'models/order' - -class SqlServerRailsOrders < ActiveRecord::Base - self.table_name = 'rails.orders' -end - -class TableNameTestSqlserver < ActiveRecord::TestCase - - self.use_transactional_fixtures = false - - def setup - Order.table_name = '[orders]' - Order.reset_column_information - end - - should 'load columns with escaped table name for model' do - assert_equal 4, Order.columns.length - end - - should 'not re-escape table name if it is escaped already for SQL queries' do - assert_sql(/SELECT \[orders\]\.\* FROM \[orders\]/) { Order.all } - end - - context 'Table scoped to user.table_name' do - - setup do - @klass = SqlServerRailsOrders - end - - should 'have no issue doing basic column reflection' do - assert_nothing_raised() { @klass.columns } - end - - end - - -end diff --git a/test/cases/temp_test_sqlserver.rb b/test/cases/temp_test_sqlserver.rb new file mode 100644 index 000000000..c9fae9490 --- /dev/null +++ b/test/cases/temp_test_sqlserver.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" + +class TempTestSQLServer < ActiveRecord::TestCase + # it "assert true" do + # assert true + # end +end diff --git a/test/cases/temporary_table_test_sqlserver.rb b/test/cases/temporary_table_test_sqlserver.rb new file mode 100644 index 000000000..0ab808a70 --- /dev/null +++ 
b/test/cases/temporary_table_test_sqlserver.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" + +class TemporaryTableSQLServer < ActiveRecord::TestCase + def test_insert_into_temporary_table + ActiveRecord::Base.with_connection do |conn| + conn.exec_query("CREATE TABLE #temp_users (id INT IDENTITY(1,1), name NVARCHAR(100))") + + result = conn.exec_query("SELECT * FROM #temp_users") + assert_equal 0, result.count + + conn.exec_query("INSERT INTO #temp_users (name) VALUES ('John'), ('Doe')") + + result = conn.exec_query("SELECT * FROM #temp_users") + assert_equal 2, result.count + end + end +end diff --git a/test/cases/transaction_test_sqlserver.rb b/test/cases/transaction_test_sqlserver.rb index 48c2e9998..6c661e7a7 100644 --- a/test/cases/transaction_test_sqlserver.rb +++ b/test/cases/transaction_test_sqlserver.rb @@ -1,93 +1,92 @@ -require 'cases/sqlserver_helper' -require 'models/ship' -require 'models/developer' +# frozen_string_literal: true -class TransactionTestSqlserver < ActiveRecord::TestCase - - self.use_transactional_fixtures = false - - setup :delete_ships - - context 'Testing transaction basics' do - - should 'allow ActiveRecord::Rollback to work in 1 transaction block' do - Ship.transaction do - Ship.create! :name => 'Black Pearl' - raise ActiveRecord::Rollback - end - assert_no_ships - end - - should 'allow nested transactions to totally rollback' do - begin - Ship.transaction do - Ship.create! :name => 'Black Pearl' - Ship.transaction do - Ship.create! :name => 'Flying Dutchman' - raise 'HELL' - end - end - rescue Exception => e - assert_no_ships - end - end +require "cases/helper_sqlserver" +require "models/ship" +require "models/developer" - end - - context 'Testing #outside_transaction?' do - - should 'work in simple usage' do - assert Ship.connection.outside_transaction? - Ship.connection.begin_db_transaction - assert !Ship.connection.outside_transaction? - Ship.connection.rollback_db_transaction - assert Ship.connection.outside_transaction? +class TransactionTestSQLServer < ActiveRecord::TestCase + self.use_transactional_tests = false + + before { delete_ships } + + it "allow ActiveRecord::Rollback to work in 1 transaction block" do + Ship.transaction do + Ship.create! name: "Black Pearl" + raise ActiveRecord::Rollback end - - should 'work inside nested transactions' do - assert Ship.connection.outside_transaction? + assert_no_ships + end + + it "allow nested transactions to totally rollback" do + Ship.transaction do + Ship.create! name: "Black Pearl" Ship.transaction do - assert !Ship.connection.outside_transaction? - Ship.transaction do - assert !Ship.connection.outside_transaction? - end + Ship.create! name: "Flying Dutchman" + raise "HELL" end - assert Ship.connection.outside_transaction? end - - should 'not call rollback if no transaction is active' do - assert_raise RuntimeError do - Ship.transaction do - Ship.connection.rollback_db_transaction - Ship.connection.expects(:rollback_db_transaction).never - raise "Rails doesn't scale!" - end - end + rescue + assert_no_ships + end + + it "can use an isolation level and reverts back to starting isolation level" do + in_level = nil + begin_level = connection.user_options_isolation_level + _(begin_level).must_match %r{read committed}i + Ship.transaction(isolation: :serializable) do + Ship.create! 
name: "Black Pearl" + in_level = connection.user_options_isolation_level end - - should 'test_open_transactions_count_is_reset_to_zero_if_no_transaction_active' do - Ship.transaction do - Ship.transaction do - Ship.connection.rollback_db_transaction - end - assert_equal 0, Ship.connection.open_transactions + after_level = connection.user_options_isolation_level + _(in_level).must_match %r{serializable}i + _(after_level).must_match %r{read committed}i + ensure + # Reset all connections. Otherwise, the next test may fail with error 'DBPROCESS is dead or not enabled'. Not sure why. + ActiveRecord::Base.connection_handler.clear_all_connections!(:all) + end + + it "can use an isolation level and reverts back to starting isolation level under exceptions" do + _(connection.user_options_isolation_level).must_match %r{read committed}i + _(lambda { + Ship.transaction(isolation: :serializable) { Ship.create! } + }).must_raise(ActiveRecord::RecordInvalid) + _(connection.user_options_isolation_level).must_match %r{read committed}i + ensure + # Reset all connections. Otherwise, the next test may fail with error 'DBPROCESS is dead or not enabled'. Not sure why. + ActiveRecord::Base.connection_handler.clear_all_connections!(:all) + end + + describe "when READ_COMMITTED_SNAPSHOT is set" do + it "should use READ COMMITTED as an isolation level" do + connection.execute "ALTER DATABASE [#{connection.current_database}] SET ALLOW_SNAPSHOT_ISOLATION ON" + connection.execute "ALTER DATABASE [#{connection.current_database}] SET READ_COMMITTED_SNAPSHOT ON WITH ROLLBACK IMMEDIATE" + + _(connection.user_options_isolation_level).must_match "read committed snapshot" + + Ship.transaction(isolation: :serializable) do + Ship.create! name: "Black Pearl" end - assert_equal 0, Ship.connection.open_transactions + + # We're actually testing that the isolation level was correctly reset to + # "READ COMMITTED", and that no exception was raised (it's reported back + # by SQL Server as "read committed snapshot"). + _(connection.user_options_isolation_level).must_match "read committed snapshot" + ensure + connection.execute "ALTER DATABASE [#{connection.current_database}] SET ALLOW_SNAPSHOT_ISOLATION OFF" + connection.execute "ALTER DATABASE [#{connection.current_database}] SET READ_COMMITTED_SNAPSHOT OFF WITH ROLLBACK IMMEDIATE" + + # Reset all connections. Otherwise, the next test may fail with error 'DBPROCESS is dead or not enabled'. Not sure why. + ActiveRecord::Base.connection_handler.clear_all_connections!(:all) end - end - - - + protected - + def delete_ships Ship.delete_all end - + def assert_no_ships assert Ship.count.zero?, "Expected Ship to have no models but it did have:\n#{Ship.all.inspect}" end - end - diff --git a/test/cases/trigger_test_sqlserver.rb b/test/cases/trigger_test_sqlserver.rb new file mode 100644 index 000000000..bfa456c60 --- /dev/null +++ b/test/cases/trigger_test_sqlserver.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" + +class SQLServerTriggerTest < ActiveRecord::TestCase + after { exclude_output_inserted_table_names.clear } + + let(:exclude_output_inserted_table_names) do + ActiveRecord::ConnectionAdapters::SQLServerAdapter.exclude_output_inserted_table_names + end + + it "can insert into a table with output inserted - with a true setting for table name" do + exclude_output_inserted_table_names["sst_table_with_trigger"] = true + assert SSTestTriggerHistory.all.empty? + obj = SSTestTrigger.create! 
event_name: "test trigger" + _(["Fixnum", "Integer"]).must_include obj.id.class.name + _(obj.event_name).must_equal "test trigger" + _(obj.id).must_be :present? + _(obj.id.to_s).must_equal SSTestTriggerHistory.first.id_source + end + + it "can insert into a table with output inserted - with a uniqueidentifier value" do + exclude_output_inserted_table_names["sst_table_with_uuid_trigger"] = "uniqueidentifier" + assert SSTestTriggerHistory.all.empty? + obj = SSTestTriggerUuid.create! event_name: "test uuid trigger" + _(obj.id.class.name).must_equal "String" + _(obj.event_name).must_equal "test uuid trigger" + _(obj.id).must_be :present? + _(obj.id.to_s).must_equal SSTestTriggerHistory.first.id_source + end + + it "can insert into a table with composite pk with output inserted - with a true setting for table name" do + exclude_output_inserted_table_names["sst_table_with_composite_pk_trigger"] = true + assert SSTestTriggerHistory.all.empty? + obj = SSTestTriggerCompositePk.create! pk_col_one: 123, pk_col_two: 42, event_name: "test trigger" + _(obj.event_name).must_equal "test trigger" + _(obj.pk_col_one).must_equal 123 + _(obj.pk_col_two).must_equal 42 + _(obj.pk_col_one.to_s).must_equal SSTestTriggerHistory.first.id_source + end + + it "can insert into a table with composite pk with different data type with output inserted - with a hash setting for table name" do + exclude_output_inserted_table_names["sst_table_with_composite_pk_trigger_with_different_data_type"] = {pk_col_one: "uniqueidentifier", pk_col_two: "int"} + assert SSTestTriggerHistory.all.empty? + obj = SSTestTriggerCompositePkWithDefferentDataType.create! pk_col_two: 123, event_name: "test trigger" + _(obj.event_name).must_equal "test trigger" + _(obj.pk_col_one).must_be :present? + _(obj.pk_col_two).must_equal 123 + _(obj.pk_col_one.to_s).must_equal SSTestTriggerHistory.first.id_source + end +end diff --git a/test/cases/unicode_test_sqlserver.rb b/test/cases/unicode_test_sqlserver.rb deleted file mode 100644 index e96ea86e5..000000000 --- a/test/cases/unicode_test_sqlserver.rb +++ /dev/null @@ -1,48 +0,0 @@ -# encoding: UTF-8 -require 'cases/sqlserver_helper' - -class UnicodeTestSqlserver < ActiveRecord::TestCase - - - context 'Testing basic saves and unicode limits' do - - should 'save and reload simple nchar string' do - assert nchar_data = SqlServerUnicode.create!(:nchar => 'A') - assert_equal 'A', SqlServerUnicode.find(nchar_data.id).nchar - end - - should 'save and reload simple nvarchar(max) string' do - test_string = 'Ken Collins' - assert nvarcharmax_data = SqlServerUnicode.create!(:nvarchar_max => test_string) - assert_equal test_string, SqlServerUnicode.find(nvarcharmax_data.id).nvarchar_max - end - - should 'not work with ANSI_WARNINGS for string truncation' do - SqlServerUnicode.create!(:nchar_10 => '01234567891') - end - - end - - context 'Testing unicode data' do - - setup do - @unicode_data = "\344\270\200\344\272\21434\344\272\224\345\205\255" # "一二34五六" - end - - should 'insert and retrieve unicode data' do - assert data = SqlServerUnicode.create!(:nvarchar => @unicode_data) - if connection_mode_dblib? - assert_equal "一二34五六", data.reload.nvarchar - elsif connection_mode_odbc? - assert_equal "一二34五六", data.reload.nvarchar, 'perhaps you are not using the utf8 odbc that does this legwork' - else - raise 'need to add a case for this' - end - assert_equal Encoding.find('UTF-8'), data.nvarchar.encoding if ruby_19? 
- end - - end - - - -end diff --git a/test/cases/uniqueness_validation_test_sqlserver.rb b/test/cases/uniqueness_validation_test_sqlserver.rb deleted file mode 100644 index e68d9db05..000000000 --- a/test/cases/uniqueness_validation_test_sqlserver.rb +++ /dev/null @@ -1,44 +0,0 @@ -# encoding: utf-8 -require 'cases/sqlserver_helper' -require 'models/event' - -class Event < ActiveRecord::Base - before_validation :strip_mb_chars_for_sqlserver - protected - def strip_mb_chars_for_sqlserver - self.title = title.mb_chars.to(4).to_s if title && title.is_utf8? - end -end - -class UniquenessValidationTestSqlserver < ActiveRecord::TestCase -end - -class UniquenessValidationTest < ActiveRecord::TestCase - - COERCED_TESTS = [:test_validate_uniqueness_with_limit_and_utf8] - - include SqlserverCoercedTest - - # I guess most databases just truncate a string when inserting. To pass this test we do a few things. - # First, we make sure the type is unicode safe, second we extend the limit to well beyond what is - # needed. At the top we make sure to auto truncate the :title string like other databases would do - # automatically. - # - # "一二三四五".mb_chars.size # => 5 - # "一二三四五六七八".mb_chars.size # => 8 - # "一二三四五六七八".mb_chars.to(4).to_s # => "一二三四五" - - def test_coerced_validate_uniqueness_with_limit_and_utf8 - with_kcode('UTF8') do - Event.connection.change_column :events, :title, :nvarchar, :limit => 30 - Event.reset_column_information - # Now the actual test copied from core. - e1 = Event.create(:title => "一二三四五") - assert e1.valid?, "Could not create an event with a unique, 5 character title" - e2 = Event.create(:title => "一二三四五六七八") - assert !e2.valid?, "Created an event whose title, with limit taken into account, is not unique" - end - end - -end - diff --git a/test/cases/utils_test_sqlserver.rb b/test/cases/utils_test_sqlserver.rb new file mode 100644 index 000000000..c27f822f8 --- /dev/null +++ b/test/cases/utils_test_sqlserver.rb @@ -0,0 +1,129 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" + +class UtilsTestSQLServer < ActiveRecord::TestCase + it ".quote_string" do + _(SQLServer::Utils.quote_string("I'll store this in C:\\Users")).must_equal "I''ll store this in C:\\Users" + end + + it ".unquote_string" do + _(SQLServer::Utils.unquote_string("I''ll store this in C:\\Users")).must_equal "I'll store this in C:\\Users" + end + + it ".quoted_raw" do + _(SQLServer::Utils.quoted_raw("some.Name")).must_equal "[some.Name]" + end + + describe ".extract_identifiers constructor and thus SQLServer::Utils::Name value object" do + let(:valid_names) { valid_names_unquoted + valid_names_quoted } + + let(:valid_names_unquoted) { + [ + "server.database.schema.object", + "server.database..object", + "server..schema.object", + "server...object", + "database.schema.object", + "database..object", + "schema.object", + "object" + ] + } + + let(:valid_names_quoted) { + [ + "[server].[database].[schema].[object]", + "[server].[database]..[object]", + "[server]..[schema].[object]", + "[server]...[object]", + "[database].[schema].[object]", + "[database]..[object]", + "[schema].[object]", + "[object]" + ] + } + + let(:server_names) { valid_names.partition { |name| name =~ /server/ } } + let(:database_names) { valid_names.partition { |name| name =~ /database/ } } + let(:schema_names) { valid_names.partition { |name| name =~ /schema/ } } + + it "extracts and returns #object identifier unquoted by default or quoted as needed" do + valid_names.each do |n| + name = extract_identifiers(n) + 
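# Usage sketch of the string helpers tested above. The tests reference them
# through a shorter SQLServer constant (assumed to be set up by the test
# helper); the fully qualified constant path is assumed here.
utils = ActiveRecord::ConnectionAdapters::SQLServer::Utils
utils.quote_string("O'Reilly")     # => "O''Reilly"
utils.unquote_string("O''Reilly")  # => "O'Reilly"
utils.quoted_raw("some.Name")      # => "[some.Name]"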
_(name.object).must_equal "object", "With #{n.inspect} for #object" + _(name.object_quoted).must_equal "[object]", "With #{n.inspect} for #object_quoted" + end + end + + [:schema, :database, :server].each do |part| + it "extracts and returns #{part} identifier unquoted by default or quoted as needed" do + present, blank = send(:"#{part}_names") + present.each do |n| + name = extract_identifiers(n) + _(name.send(:"#{part}")).must_equal part.to_s, "With #{n.inspect} for ##{part} method" + _(name.send(:"#{part}_quoted")).must_equal "[#{part}]", "With #{n.inspect} for ##{part}_quoted method" + end + blank.each do |n| + name = extract_identifiers(n) + _(name.send(:"#{part}")).must_be_nil "With #{n.inspect} for ##{part} method" + _(name.send(:"#{part}_quoted")).must_be_nil "With #{n.inspect} for ##{part}_quoted method" + end + end + end + + it "does not blow up on nil or blank string name" do + _(extract_identifiers(nil).object).must_be_nil + _(extract_identifiers(" ").object).must_be_nil + end + + it "has a #quoted that returns a fully quoted name with all identifiers as originally passed in" do + _(extract_identifiers("object").quoted).must_equal "[object]" + _(extract_identifiers("server.database..object").quoted).must_equal "[server].[database]..[object]" + _(extract_identifiers("[server]...[object]").quoted).must_equal "[server]...[object]" + end + + it "can take a symbol argument" do + _(extract_identifiers(:object).object).must_equal "object" + end + + it "allows identifiers with periods to work" do + _(extract_identifiers("[obj.name]").quoted).must_equal "[obj.name]" + _(extract_identifiers("[obj.name].[foo]").quoted).must_equal "[obj.name].[foo]" + end + + it "should indicate if a name is fully qualified" do + _(extract_identifiers("object").fully_qualified?).must_equal false + _(extract_identifiers("schema.object").fully_qualified?).must_equal false + _(extract_identifiers("database.schema.object").fully_qualified?).must_equal false + _(extract_identifiers("database.object").fully_qualified?).must_equal false + _(extract_identifiers("server...object").fully_qualified?).must_equal false + _(extract_identifiers("server.database..object").fully_qualified?).must_equal false + _(extract_identifiers("server.database.schema.object").fully_qualified?).must_equal true + _(extract_identifiers("server.database.schema.").fully_qualified?).must_equal true + _(extract_identifiers("[obj.name]").fully_qualified?).must_equal false + _(extract_identifiers("[schema].[obj.name]").fully_qualified?).must_equal false + _(extract_identifiers("[database].[schema].[obj.name]").fully_qualified?).must_equal false + _(extract_identifiers("[database].[obj.name]").fully_qualified?).must_equal false + _(extract_identifiers("[server.name]...[obj.name]").fully_qualified?).must_equal false + _(extract_identifiers("[server.name].[database]..[obj.name]").fully_qualified?).must_equal false + _(extract_identifiers("[server.name].[database].[schema].[obj.name]").fully_qualified?).must_equal true + _(extract_identifiers("[server.name].[database].[schema].").fully_qualified?).must_equal true + end + + it "can return fully qualified quoted table name" do + name = extract_identifiers("[my.server].db.schema.") + _(name.fully_qualified_database_quoted).must_equal "[my.server].[db]" + name = extract_identifiers("[server.name].[database].[schema].[object]") + _(name.fully_qualified_database_quoted).must_equal "[server.name].[database]" + name = extract_identifiers("server.database.schema.object") + 
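# Sketch of the name value object returned by extract_identifiers, mirroring
# the expectations above (fully qualified constant path assumed, as in the
# previous sketch).
name = ActiveRecord::ConnectionAdapters::SQLServer::Utils.extract_identifiers("server.database.schema.object")
name.object            # => "object"
name.schema_quoted     # => "[schema]"
name.fully_qualified?  # => true
name.quoted            # => "[server].[database].[schema].[object]"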
_(name.fully_qualified_database_quoted).must_equal "[server].[database]" + end + end + + private + + def extract_identifiers(name) + SQLServer::Utils.extract_identifiers(name) + end +end diff --git a/test/cases/uuid_test_sqlserver.rb b/test/cases/uuid_test_sqlserver.rb new file mode 100644 index 000000000..134687ca3 --- /dev/null +++ b/test/cases/uuid_test_sqlserver.rb @@ -0,0 +1,54 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" + +class SQLServerUuidTest < ActiveRecord::TestCase + let(:acceptable_uuid) { ActiveRecord::ConnectionAdapters::SQLServer::Type::Uuid::ACCEPTABLE_UUID } + + it "has a uuid primary key" do + _(SSTestUuid.columns_hash["id"].type).must_equal :uuid + assert SSTestUuid.primary_key + end + + it "can create with a new pk" do + obj = SSTestUuid.create! + _(obj.id).must_be :present? + _(obj.id).must_match acceptable_uuid + end + + it "can create other uuid column on reload" do + obj = SSTestUuid.create! + obj.reload + _(obj.other_uuid).must_match acceptable_uuid + end + + it "can find uuid pk via connection" do + _(connection.primary_key(SSTestUuid.table_name)).must_equal "id" + end + + it "changing column default" do + table_name = SSTestUuid.table_name + connection.add_column table_name, :thingy, :uuid, null: false, default: "NEWSEQUENTIALID()" + SSTestUuid.reset_column_information + column = SSTestUuid.columns_hash["thingy"] + _(column.default_function).must_equal "newsequentialid()" + # Now to a different function. + connection.change_column table_name, :thingy, :uuid, null: false, default: "NEWID()" + SSTestUuid.reset_column_information + column = SSTestUuid.columns_hash["thingy"] + _(column.default_function).must_equal "newid()" + end + + it "can insert even when use_output_inserted to false " do + obj = with_use_output_inserted_disabled { SSTestUuid.create!(name: "😢") } + _(obj.id).must_be :nil? 
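# Hedged sketch of the uuid default-function behavior tested above. The
# :tracking_uuid column name is illustrative; :uuid maps to uniqueidentifier
# and the default function is surfaced back through Column#default_function.
conn = ActiveRecord::Base.lease_connection
conn.add_column :sst_uuids, :tracking_uuid, :uuid, null: false, default: "NEWSEQUENTIALID()"

SSTestUuid.reset_column_information
SSTestUuid.columns_hash["tracking_uuid"].default_function # => "newsequentialid()"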
+ end + + it "can add column with proc as default" do + table_name = SSTestUuid.table_name + connection.add_column table_name, :thingy, :uuid, null: false, default: -> { "NEWSEQUENTIALID()" } + SSTestUuid.reset_column_information + column = SSTestUuid.columns_hash["thingy"] + _(column.default_function).must_equal "newsequentialid()" + end +end diff --git a/test/cases/view_test_sqlserver.rb b/test/cases/view_test_sqlserver.rb new file mode 100644 index 000000000..ed5d2303c --- /dev/null +++ b/test/cases/view_test_sqlserver.rb @@ -0,0 +1,68 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" + +class ViewTestSQLServer < ActiveRecord::TestCase + let(:connection) { ActiveRecord::Base.lease_connection } + + describe "view with default values" do + before do + begin + connection.drop_table :view_casing_table + rescue + nil + end + connection.create_table :view_casing_table, force: true do |t| + t.boolean :Default_Falsey, null: false, default: false + t.boolean :Default_Truthy, null: false, default: true + t.string :default_string_null, null: true, default: nil + t.string :default_string, null: false, default: "abc" + end + + connection.execute("DROP VIEW IF EXISTS view_casing_table_view;") + connection.execute <<-SQL + CREATE VIEW view_casing_table_view AS + SELECT id AS id, + default_falsey AS falsey, + default_truthy AS truthy, + default_string_null AS s_null, + default_string AS s + FROM view_casing_table + SQL + end + + it "default values are correct when column casing used in tables and views are different" do + klass = Class.new(ActiveRecord::Base) do + self.table_name = "view_casing_table_view" + end + + obj = klass.new + assert_equal false, obj.falsey + assert_equal true, obj.truthy + assert_equal "abc", obj.s + assert_nil obj.s_null + assert_equal 0, klass.count + + obj.save! 
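# Sketch of reading through the view created in the test above: a plain
# ActiveRecord model pointed at the view name picks up the aliased columns
# and their underlying table defaults (the class name here is illustrative).
class ViewCasingRow < ActiveRecord::Base
  self.table_name = "view_casing_table_view"
end

row = ViewCasingRow.new
row.falsey  # => false
row.truthy  # => true
row.s       # => "abc"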
+ assert_equal false, obj.falsey + assert_equal true, obj.truthy + assert_equal "abc", obj.s + assert_nil obj.s_null + assert_equal 1, klass.count + end + end + + describe "identity insert" do + it "creates table record through a view" do + assert_difference("SSTestCustomersView.count", 2) do + SSTestCustomersView.create!(id: 5, name: "Bob") + SSTestCustomersView.create!(id: 6, name: "Tim") + end + end + + it "creates table records through a view using fixtures" do + ActiveRecord::FixtureSet.create_fixtures(File.join(ARTest::SQLServer.test_root_sqlserver, "fixtures"), ["sst_customers_view"]) + assert_equal SSTestCustomersView.all.count, 2 + end + end +end diff --git a/test/cases/virtual_column_test_sqlserver.rb b/test/cases/virtual_column_test_sqlserver.rb new file mode 100644 index 000000000..44d740552 --- /dev/null +++ b/test/cases/virtual_column_test_sqlserver.rb @@ -0,0 +1,113 @@ +# frozen_string_literal: true + +require "cases/helper_sqlserver" +require "support/schema_dumping_helper" + +class VirtualColumnTestSQLServer < ActiveRecord::TestCase + include SchemaDumpingHelper + + class VirtualColumn < ActiveRecord::Base + end + + def setup + @connection = ActiveRecord::Base.lease_connection + @connection.create_table :virtual_columns, force: true do |t| + t.string :name + t.virtual :upper_name, as: "UPPER(name)", stored: true + t.virtual :lower_name, as: "LOWER(name)", stored: false + t.virtual :octet_name, as: "LEN(name)" + t.virtual :mutated_name, as: "REPLACE(name, 'l', 'L')" + t.integer :column1 + end + VirtualColumn.create(name: "Rails", column1: 10) + end + + def teardown + @connection.drop_table :virtual_columns, if_exists: true + VirtualColumn.reset_column_information + end + + def test_virtual_column_with_full_inserts + partial_inserts_was = VirtualColumn.partial_inserts + VirtualColumn.partial_inserts = false + assert_nothing_raised do + VirtualColumn.create!(name: "Rails") + end + ensure + VirtualColumn.partial_inserts = partial_inserts_was + end + + def test_stored_column + column = VirtualColumn.columns_hash["upper_name"] + assert_predicate column, :virtual? + assert_predicate column, :virtual_stored? + assert_equal "RAILS", VirtualColumn.take.upper_name + end + + def test_explicit_virtual_column + column = VirtualColumn.columns_hash["lower_name"] + assert_predicate column, :virtual? + assert_not_predicate column, :virtual_stored? + assert_equal "rails", VirtualColumn.take.lower_name + end + + def test_implicit_virtual_column + column = VirtualColumn.columns_hash["octet_name"] + assert_predicate column, :virtual? + assert_not_predicate column, :virtual_stored? + assert_equal 5, VirtualColumn.take.octet_name + end + + def test_virtual_column_with_comma_in_definition + column = VirtualColumn.columns_hash["mutated_name"] + assert_predicate column, :virtual? + assert_not_predicate column, :virtual_stored? + assert_not_nil column.default_function + assert_equal "RaiLs", VirtualColumn.take.mutated_name + end + + def test_change_table_with_stored_generated_column + @connection.change_table :virtual_columns do |t| + t.virtual :decr_column1, as: "column1 - 1", stored: true + end + VirtualColumn.reset_column_information + column = VirtualColumn.columns_hash["decr_column1"] + assert_predicate column, :virtual? + assert_predicate column, :virtual_stored? 
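# Hedged sketch of declaring SQL Server computed columns with the t.virtual
# helper exercised above (table and column names are illustrative).
# `stored: true` persists the computed value; omitting it leaves the column
# purely computed, as the octet_name example above shows.
ActiveRecord::Base.lease_connection.create_table :people_sketch, force: true do |t|
  t.string  :name
  t.virtual :upper_name,  as: "UPPER(name)", stored: true
  t.virtual :name_length, as: "LEN(name)"
end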
+ assert_equal 9, VirtualColumn.take.decr_column1 + end + + def test_change_table_with_explicit_virtual_generated_column + @connection.change_table :virtual_columns do |t| + t.virtual :incr_column1, as: "column1 + 1", stored: false + end + VirtualColumn.reset_column_information + column = VirtualColumn.columns_hash["incr_column1"] + assert_predicate column, :virtual? + assert_not_predicate column, :virtual_stored? + assert_equal 11, VirtualColumn.take.incr_column1 + end + + def test_change_table_with_implicit_virtual_generated_column + @connection.change_table :virtual_columns do |t| + t.virtual :sqr_column1, as: "power(column1, 2)" + end + VirtualColumn.reset_column_information + column = VirtualColumn.columns_hash["sqr_column1"] + assert_predicate column, :virtual? + assert_not_predicate column, :virtual_stored? + assert_equal 100, VirtualColumn.take.sqr_column1 + end + + def test_schema_dumping + output = dump_table_schema("virtual_columns") + assert_match(/t\.virtual\s+"lower_name",\s+as: "\(lower\(\[name\]\)\)", stored: false$/i, output) + assert_match(/t\.virtual\s+"upper_name",\s+as: "\(upper\(\[name\]\)\)", stored: true$/i, output) + assert_match(/t\.virtual\s+"octet_name",\s+as: "\(len\(\[name\]\)\)", stored: false$/i, output) + end + + def test_build_fixture_sql + fixtures = ActiveRecord::FixtureSet.create_fixtures(FIXTURES_ROOT, :virtual_columns).first + assert_equal 2, fixtures.size + end +end diff --git a/test/config.yml b/test/config.yml index 553903d79..ef095168c 100644 --- a/test/config.yml +++ b/test/config.yml @@ -1,34 +1,30 @@ -default_connection: dblib default_connection_info: &default_connection_info adapter: sqlserver - mode: <%= ENV['ARCONN'] || 'dblib' %> host: <%= ENV['ACTIVERECORD_UNITTEST_HOST'] || 'localhost' %> port: <%= ENV['ACTIVERECORD_UNITTEST_PORT'] %> database: activerecord_unittest username: <%= ENV['ACTIVERECORD_UNITTEST_USER'] || 'rails' %> password: <%= ENV['ACTIVERECORD_UNITTEST_PASS'] || '' %> - azure: <%= !ENV['ACTIVERECORD_UNITTEST_AZURE'].nil? %> + collation: <%= ENV['ACTIVERECORD_UNITTEST_COLLATION'] || nil %> + encoding: utf8 connections: - - dblib: + + sqlserver: arunit: <<: *default_connection_info appname: SQLServerAdptrUnit dataserver: <%= ENV['ACTIVERECORD_UNITTEST_DATASERVER'] %> + tds_version: <%= ENV['ACTIVERECORD_UNITTEST_TDSVERSION'] %> + azure: <%= !ENV['ACTIVERECORD_UNITTEST_AZURE'].nil? %> + timeout: <%= ENV['ACTIVERECORD_UNITTEST_AZURE'].present? ? 20 : 10 %> arunit2: <<: *default_connection_info database: activerecord_unittest2 appname: SQLServerAdptrUnit2 dataserver: <%= ENV['ACTIVERECORD_UNITTEST_DATASERVER'] %> + tds_version: <%= ENV['ACTIVERECORD_UNITTEST_TDSVERSION'] %> + azure: <%= !ENV['ACTIVERECORD_UNITTEST_AZURE'].nil? %> + timeout: <%= ENV['ACTIVERECORD_UNITTEST_AZURE'].present? ? 
20 : 10 %> - odbc: - arunit: - <<: *default_connection_info - dsn: <%= ENV['ACTIVERECORD_UNITTEST2_DSN'] || 'activerecord_unittest' %> - arunit2: - <<: *default_connection_info - database: activerecord_unittest2 - dsn: <%= ENV['ACTIVERECORD_UNITTEST2_DSN'] || 'activerecord_unittest2' %> - diff --git a/test/debug.rb b/test/debug.rb new file mode 100644 index 000000000..dc918ddb0 --- /dev/null +++ b/test/debug.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +# require 'rails/all' +require "tiny_tds" + +c = TinyTds::Client.new( + host: ENV["CI_AZURE_HOST"], + username: "rails", + password: ENV["CI_AZURE_PASS"], + database: "activerecord_unittest", + azure: true, + tds_version: "7.3" +) + +puts c.execute("SELECT 1 AS [one]").each +c.close diff --git a/test/fixtures/1px.gif b/test/fixtures/1px.gif new file mode 100644 index 000000000..f32722af9 Binary files /dev/null and b/test/fixtures/1px.gif differ diff --git a/test/fixtures/sst_customers_view.yml b/test/fixtures/sst_customers_view.yml new file mode 100644 index 000000000..668ba3763 --- /dev/null +++ b/test/fixtures/sst_customers_view.yml @@ -0,0 +1,6 @@ +david: + name: "David" + balance: 2,004 +aidan: + name: "Aidan" + balance: 10,191 diff --git a/test/migrations/create_clients_and_change_column_collation.rb b/test/migrations/create_clients_and_change_column_collation.rb new file mode 100644 index 000000000..a14568c3a --- /dev/null +++ b/test/migrations/create_clients_and_change_column_collation.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +class CreateClientsAndChangeColumnCollation < ActiveRecord::Migration[5.2] + def up + create_table :clients do |t| + t.string :name + t.string :code, collation: :SQL_Latin1_General_CP1_CS_AS + + t.timestamps + end + + change_column :clients, :name, :string, collation: "SQL_Latin1_General_CP1_CS_AS" + change_column :clients, :code, :string, collation: "SQL_Latin1_General_CP1_CI_AS" + end + + def down + drop_table :clients + end +end diff --git a/test/migrations/create_clients_and_change_column_null.rb b/test/migrations/create_clients_and_change_column_null.rb new file mode 100644 index 000000000..7f527806c --- /dev/null +++ b/test/migrations/create_clients_and_change_column_null.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +class CreateClientsAndChangeColumnNull < ActiveRecord::Migration[5.2] + def up + create_table :clients do |t| + t.string :name + t.string :code + t.decimal :value + + t.timestamps + end + + change_column :clients, :name, :string, limit: 15 + change_column :clients, :code, :string, default: "n/a" + change_column :clients, :value, :decimal, precision: 32, scale: 8 + + change_column_null :clients, :name, false + change_column_null :clients, :code, false + change_column_null :clients, :value, false + end + + def down + drop_table :clients + end +end diff --git a/test/migrations/transaction_table/1_table_will_never_be_created.rb b/test/migrations/transaction_table/1_table_will_never_be_created.rb index eeab6d807..c850fae9b 100644 --- a/test/migrations/transaction_table/1_table_will_never_be_created.rb +++ b/test/migrations/transaction_table/1_table_will_never_be_created.rb @@ -1,11 +1,11 @@ -class TableWillNeverBeCreated < ActiveRecord::Migration - +# frozen_string_literal: true + +class TableWillNeverBeCreated < ActiveRecord::Migration[5.2] def self.up - create_table(:sqlserver_trans_table1) { } - create_table(:sqlserver_trans_table2) { raise ActiveRecord::StatementInvalid } + create_table(:sqlserver_trans_table1) {} + create_table(:sqlserver_trans_table2) { 
raise("HELL") } end - + def self.down end - end diff --git a/test/models/sqlserver/alien.rb b/test/models/sqlserver/alien.rb new file mode 100644 index 000000000..a41c51888 --- /dev/null +++ b/test/models/sqlserver/alien.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class Alien < ActiveRecord::Base + self.table_name = "test.aliens" +end diff --git a/test/models/sqlserver/booking.rb b/test/models/sqlserver/booking.rb new file mode 100644 index 000000000..e3f70a5c5 --- /dev/null +++ b/test/models/sqlserver/booking.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTestBooking < ActiveRecord::Base + self.table_name = "sst_bookings" +end diff --git a/test/models/sqlserver/composite_pk.rb b/test/models/sqlserver/composite_pk.rb new file mode 100644 index 000000000..180c8ac28 --- /dev/null +++ b/test/models/sqlserver/composite_pk.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +class SSCompositePkWithoutIdentity < ActiveRecord::Base + self.table_name = :sst_composite_without_identity +end + +class SSCompositePkWithIdentity < ActiveRecord::Base + self.table_name = :sst_composite_with_identity +end diff --git a/test/models/sqlserver/customers_view.rb b/test/models/sqlserver/customers_view.rb new file mode 100644 index 000000000..f362216cb --- /dev/null +++ b/test/models/sqlserver/customers_view.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTestCustomersView < ActiveRecord::Base + self.table_name = "sst_customers_view" +end diff --git a/test/models/sqlserver/datatype.rb b/test/models/sqlserver/datatype.rb new file mode 100644 index 000000000..3e3d61ed5 --- /dev/null +++ b/test/models/sqlserver/datatype.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTestDatatype < ActiveRecord::Base + self.table_name = :sst_datatypes +end diff --git a/test/models/sqlserver/datatype_migration.rb b/test/models/sqlserver/datatype_migration.rb new file mode 100644 index 000000000..2a77cce60 --- /dev/null +++ b/test/models/sqlserver/datatype_migration.rb @@ -0,0 +1,10 @@ +# frozen_string_literal: true + +class SSTestDatatypeMigration < ActiveRecord::Base + self.table_name = :sst_datatypes_migration +end + +class SSTestDatatypeMigrationJson < ActiveRecord::Base + self.table_name = :sst_datatypes_migration + attribute :json_col, ActiveRecord::Type::SQLServer::Json.new +end diff --git a/test/models/sqlserver/dollar_table_name.rb b/test/models/sqlserver/dollar_table_name.rb new file mode 100644 index 000000000..ba0fc1596 --- /dev/null +++ b/test/models/sqlserver/dollar_table_name.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTestDollarTableName < ActiveRecord::Base + self.table_name = "sst_my$strange_table" +end diff --git a/test/models/sqlserver/edge_schema.rb b/test/models/sqlserver/edge_schema.rb new file mode 100644 index 000000000..122908699 --- /dev/null +++ b/test/models/sqlserver/edge_schema.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +class SSTestEdgeSchema < ActiveRecord::Base + self.table_name = "sst_edge_schemas" + + def with_spaces + read_attribute :"with spaces" + end + + def with_spaces=(value) + write_attribute :"with spaces", value + end +end diff --git a/test/models/sqlserver/fk_has_fk.rb b/test/models/sqlserver/fk_has_fk.rb new file mode 100644 index 000000000..6214b5055 --- /dev/null +++ b/test/models/sqlserver/fk_has_fk.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTestHasFk < ActiveRecord::Base + self.table_name = "sst_has_fks" +end diff --git a/test/models/sqlserver/fk_has_pk.rb b/test/models/sqlserver/fk_has_pk.rb 
new file mode 100644 index 000000000..814e22480 --- /dev/null +++ b/test/models/sqlserver/fk_has_pk.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTestHasPk < ActiveRecord::Base + self.table_name = "sst_has_pks" +end diff --git a/test/models/sqlserver/natural_pk_data.rb b/test/models/sqlserver/natural_pk_data.rb new file mode 100644 index 000000000..9da2a6c11 --- /dev/null +++ b/test/models/sqlserver/natural_pk_data.rb @@ -0,0 +1,6 @@ +# frozen_string_literal: true + +class SSTestNaturalPkData < ActiveRecord::Base + self.table_name = "sst_natural_pk_data" + self.primary_key = "legacy_id" +end diff --git a/test/models/sqlserver/natural_pk_int_data.rb b/test/models/sqlserver/natural_pk_int_data.rb new file mode 100644 index 000000000..294922e0f --- /dev/null +++ b/test/models/sqlserver/natural_pk_int_data.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTestNaturalPkIntData < ActiveRecord::Base + self.table_name = "sst_natural_pk_int_data" +end diff --git a/test/models/sqlserver/no_pk_data.rb b/test/models/sqlserver/no_pk_data.rb new file mode 100644 index 000000000..1a5d15d36 --- /dev/null +++ b/test/models/sqlserver/no_pk_data.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTestNoPkData < ActiveRecord::Base + self.table_name = "sst_no_pk_data" +end diff --git a/test/models/sqlserver/object_default.rb b/test/models/sqlserver/object_default.rb new file mode 100644 index 000000000..728c4a802 --- /dev/null +++ b/test/models/sqlserver/object_default.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTestObjectDefault < ActiveRecord::Base + self.table_name = "sst_defaultobjects" +end diff --git a/test/models/sqlserver/quoted_table.rb b/test/models/sqlserver/quoted_table.rb new file mode 100644 index 000000000..e387bb3df --- /dev/null +++ b/test/models/sqlserver/quoted_table.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +class SSTestQuotedTable < ActiveRecord::Base + self.table_name = "[sst_quoted-table]" +end + +class SSTestQuotedTableUser < ActiveRecord::Base + self.table_name = "[dbo].[sst_quoted-table]" +end diff --git a/test/models/sqlserver/quoted_view_1.rb b/test/models/sqlserver/quoted_view_1.rb new file mode 100644 index 000000000..5e816ae8c --- /dev/null +++ b/test/models/sqlserver/quoted_view_1.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTestQuotedView1 < ActiveRecord::Base + self.table_name = "sst_quoted-view1" +end diff --git a/test/models/sqlserver/quoted_view_2.rb b/test/models/sqlserver/quoted_view_2.rb new file mode 100644 index 000000000..be9d62f46 --- /dev/null +++ b/test/models/sqlserver/quoted_view_2.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTestQuotedView2 < ActiveRecord::Base + self.table_name = "sst_quoted-view2" +end diff --git a/test/models/sqlserver/recurring_task.rb b/test/models/sqlserver/recurring_task.rb new file mode 100644 index 000000000..aecf7c462 --- /dev/null +++ b/test/models/sqlserver/recurring_task.rb @@ -0,0 +1,3 @@ +class RecurringTask < ActiveRecord::Base + self.table_name = "recurring_tasks" +end diff --git a/test/models/sqlserver/sst_memory.rb b/test/models/sqlserver/sst_memory.rb new file mode 100644 index 000000000..b4d26e602 --- /dev/null +++ b/test/models/sqlserver/sst_memory.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTMemory < ActiveRecord::Base + self.table_name = "sst_memory" +end diff --git a/test/models/sqlserver/sst_string_collation.rb b/test/models/sqlserver/sst_string_collation.rb new file mode 100644 index 000000000..cdb08d775 --- 
/dev/null +++ b/test/models/sqlserver/sst_string_collation.rb @@ -0,0 +1,3 @@ +class SstStringCollation < ActiveRecord::Base + self.table_name = "sst_string_collation" +end diff --git a/test/models/sqlserver/string_default.rb b/test/models/sqlserver/string_default.rb new file mode 100644 index 000000000..f4c2db47c --- /dev/null +++ b/test/models/sqlserver/string_default.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTestStringDefault < ActiveRecord::Base + self.table_name = "sst_string_defaults" +end diff --git a/test/models/sqlserver/string_defaults_big_view.rb b/test/models/sqlserver/string_defaults_big_view.rb new file mode 100644 index 000000000..af8e6da6d --- /dev/null +++ b/test/models/sqlserver/string_defaults_big_view.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTestStringDefaultsBigView < ActiveRecord::Base + self.table_name = "sst_string_defaults_big_view" +end diff --git a/test/models/sqlserver/string_defaults_view.rb b/test/models/sqlserver/string_defaults_view.rb new file mode 100644 index 000000000..4bcb4fe60 --- /dev/null +++ b/test/models/sqlserver/string_defaults_view.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTestStringDefaultsView < ActiveRecord::Base + self.table_name = "sst_string_defaults_view" +end diff --git a/test/models/sqlserver/table_with_spaces.rb b/test/models/sqlserver/table_with_spaces.rb new file mode 100644 index 000000000..d5f07ec4a --- /dev/null +++ b/test/models/sqlserver/table_with_spaces.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class TableWithSpaces < ActiveRecord::Base + self.table_name = "A Table With Spaces" +end diff --git a/test/models/sqlserver/tinyint_pk.rb b/test/models/sqlserver/tinyint_pk.rb new file mode 100644 index 000000000..55c05c1a4 --- /dev/null +++ b/test/models/sqlserver/tinyint_pk.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTestTinyintPk < ActiveRecord::Base + self.table_name = "sst_tinyint_pk" +end diff --git a/test/models/sqlserver/trigger.rb b/test/models/sqlserver/trigger.rb new file mode 100644 index 000000000..c668f1f2a --- /dev/null +++ b/test/models/sqlserver/trigger.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +class SSTestTrigger < ActiveRecord::Base + self.table_name = "sst_table_with_trigger" +end + +class SSTestTriggerUuid < ActiveRecord::Base + self.table_name = "sst_table_with_uuid_trigger" +end + +class SSTestTriggerCompositePk < ActiveRecord::Base + self.table_name = "sst_table_with_composite_pk_trigger" +end + +class SSTestTriggerCompositePkWithDefferentDataType < ActiveRecord::Base + self.table_name = "sst_table_with_composite_pk_trigger_with_different_data_type" +end diff --git a/test/models/sqlserver/trigger_history.rb b/test/models/sqlserver/trigger_history.rb new file mode 100644 index 000000000..d93bc4384 --- /dev/null +++ b/test/models/sqlserver/trigger_history.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTestTriggerHistory < ActiveRecord::Base + self.table_name = "sst_table_with_trigger_history" +end diff --git a/test/models/sqlserver/upper.rb b/test/models/sqlserver/upper.rb new file mode 100644 index 000000000..0fa400977 --- /dev/null +++ b/test/models/sqlserver/upper.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTestUpper < ActiveRecord::Base + self.table_name = "sst_upper_tests" +end diff --git a/test/models/sqlserver/uppered.rb b/test/models/sqlserver/uppered.rb new file mode 100644 index 000000000..9d025b0ab --- /dev/null +++ b/test/models/sqlserver/uppered.rb @@ -0,0 +1,5 @@ +# 
frozen_string_literal: true + +class SSTestUppered < ActiveRecord::Base + self.table_name = "SST_UPPER_TESTS" +end diff --git a/test/models/sqlserver/uuid.rb b/test/models/sqlserver/uuid.rb new file mode 100644 index 000000000..91da8fd9a --- /dev/null +++ b/test/models/sqlserver/uuid.rb @@ -0,0 +1,5 @@ +# frozen_string_literal: true + +class SSTestUuid < ActiveRecord::Base + self.table_name = "sst_uuids" +end diff --git a/test/profile/connection_profile_case.rb b/test/profile/connection_profile_case.rb deleted file mode 100644 index 7797dff8b..000000000 --- a/test/profile/connection_profile_case.rb +++ /dev/null @@ -1,30 +0,0 @@ -require 'profile/helper' -require 'models/topic' -require 'models/reply' - -class ConnectionProfileCase < ActiveRecord::TestCase - - fixtures :topics - - def setup - @connection = ActiveRecord::Base.connection - end - - def test_select - select_statement = "SELECT [topics].* FROM [topics]" - ruby_profile :connection_select do - 1000.times { @connection.send :select, select_statement } - end - end - - def test_select_one - select_statement = "SELECT [topics].* FROM [topics]" - ruby_profile :connection_select_one do - 1000.times { @connection.select_one(select_statement) } - end - end - - -end - - diff --git a/test/profile/finder_profile_case.rb b/test/profile/finder_profile_case.rb deleted file mode 100644 index 7fb0cc9ea..000000000 --- a/test/profile/finder_profile_case.rb +++ /dev/null @@ -1,18 +0,0 @@ -require 'profile/helper' -require 'models/topic' -require 'models/reply' - -class FinderProfileCase < ActiveRecord::TestCase - - fixtures :topics - - def test_find_all - ruby_profile :finder_find_all do - 1000.times { Topic.all } - end - end - - -end - - diff --git a/test/profile/gc_profile_case.rb b/test/profile/gc_profile_case.rb deleted file mode 100644 index 83834f351..000000000 --- a/test/profile/gc_profile_case.rb +++ /dev/null @@ -1,77 +0,0 @@ -require 'benchmark' -require 'cases/sqlserver_helper' -require 'models/topic' -require 'models/reply' - -raise "GC allocation benchmarks only supported on Ruby 1.9!" unless RUBY_VERSION >= '1.9' - -class GcProfileCase < ActiveRecord::TestCase - - fixtures :topics - - def setup - create_mass_topics unless @created_mass_topics - @connection = ActiveRecord::Base.connection - @select_statement = "SELECT [topics].* FROM [topics]" - end - - def test_coercion - bench_allocations('coercion') do - Topic.all(:limit => 100).each do |t| - t.attributes.keys.each do |k| - t.send(k.to_sym) - end - end - end - end - - def test_select - bench_allocations('select') do - @connection.send :select, @select_statement - end - end - - def test_select_one - bench_allocations('select_one') do - 100.times { @connection.select_one(@select_statement) } - end - end - - def test_columns - bench_allocations('columns') do - 100.times do - Topic.reset_column_information - Topic.columns - end - end - end - - - protected - - def create_mass_topics - GC::Profiler.clear - GC::Profiler.disable - all_topics = Topic.all - 100.times { all_topics.each { |t| Topic.create! 
t.attributes } } - @created_mass_topics = true - GC.start - GC::Profiler.enable - GC::Profiler.clear - end - - def bench_allocations(feature, iterations=10, &blk) - puts "\nGC overhead for #{feature}" - GC::Profiler.clear - GC::Profiler.enable - iterations.times{ blk.call } - GC::Profiler.report(STDOUT) - GC::Profiler.disable - end - -end - - - - - diff --git a/test/profile/helper.rb b/test/profile/helper.rb deleted file mode 100644 index 58fd5584f..000000000 --- a/test/profile/helper.rb +++ /dev/null @@ -1,29 +0,0 @@ -require 'cases/sqlserver_helper' -require 'ruby-prof' - -class ActiveRecord::TestCase - - - protected - - def ruby_profile(name) - result = RubyProf.profile { yield } - [:flat,:graph,:html].each do |printer| - save_ruby_prof_report(result, name, printer) - end - end - - def save_ruby_prof_report(result, name, printer) - ptr = case printer - when :flat then RubyProf::FlatPrinter - when :graph then RubyProf::GraphPrinter - when :html then RubyProf::GraphHtmlPrinter - end - file_name = printer == :html ? "#{name}_graph.html" : "#{name}_#{printer}.txt" - file_path = File.join(SQLSERVER_TEST_ROOT, 'profile', 'output', file_name) - File.open(file_path,'w') do |file| - printer == :html ? ptr.new(result).print(file) : ptr.new(result).print(file,0) - end - end - -end diff --git a/test/profile/query_plan_complex.rb b/test/profile/query_plan_complex.rb deleted file mode 100755 index 80a46a35a..000000000 --- a/test/profile/query_plan_complex.rb +++ /dev/null @@ -1,68 +0,0 @@ -=begin - -Query Plan Complex -================== -Author: Ken Collins -Date: May 22, 2011 -Summary: Benchmark complex cached query plan reuse in SQL Server. - -System Information ------------------- - Operating System: Mac OS X 10.6.7 (10J869) - CPU: Quad-Core Intel Xeon 2.66 GHz - Processor Count: 4 - Memory: 24 GB - ruby 1.8.7 (2011-02-18 patchlevel 334) [i686-darwin10.6.0], MBARI 0x6770, Ruby Enterprise Edition 2011.03 - -"Simple - Query Plan Reuse" is up to 88% faster over repetitions ------------------------------------------------------------------ - - Simple - Query Plan Reuse 0.230067014694214 secs Fastest - Simple - Dynamic SQL 1.99195981025696 secs 88% Slower - -=end - -require 'rubygems' -require 'bundler' -Bundler.setup -require 'tiny_tds' -require 'bench_press' - -extend BenchPress - -author 'Ken Collins' -summary 'Benchmark complex cached query plan reuse in SQL Server.' 
-reps 500 - -@client = TinyTds::Client.new :host => 'mc2008', :username => 'rails' - - -measure "Simple - Dynamic SQL" do - sql = " - SELECT TOP (1) [companies].id - FROM [companies] - LEFT OUTER JOIN [companies] [clients_using_primary_keys_companies] ON [clients_using_primary_keys_companies].[firm_name] = [companies].[name] - AND [clients_using_primary_keys_companies].[type] IN (N'Client', N'SpecialClient', N'VerySpecialClient') - WHERE [companies].[type] IN (N'Firm') - AND [companies].[id] = #{rand(1000000)} - GROUP BY [companies].id - ORDER BY MIN(clients_using_primary_keys_companies.name)" - @client.execute(sql).do -end - -measure "Simple - Query Plan Reuse" do - sql = " - EXEC sp_executesql N' - SELECT TOP (1) [companies].id - FROM [companies] - LEFT OUTER JOIN [companies] [clients_using_primary_keys_companies] ON [clients_using_primary_keys_companies].[firm_name] = [companies].[name] - AND [clients_using_primary_keys_companies].[type] IN (N''Client'', N''SpecialClient'', N''VerySpecialClient'') - WHERE [companies].[type] IN (N''Firm'') - AND [companies].[id] = @0 - GROUP BY [companies].id - ORDER BY MIN(clients_using_primary_keys_companies.name)', - N'@0 int', - @0 = #{rand(1000000)}" - @client.execute(sql).do -end - diff --git a/test/profile/query_plan_simple.rb b/test/profile/query_plan_simple.rb deleted file mode 100755 index a944ae8a4..000000000 --- a/test/profile/query_plan_simple.rb +++ /dev/null @@ -1,47 +0,0 @@ -=begin - -Query Plan Simple -================= -Author: Ken Collins -Date: May 22, 2011 -Summary: Benchmark simple cached query plan reuse in SQL Server. - -System Information ------------------- - Operating System: Mac OS X 10.6.7 (10J869) - CPU: Quad-Core Intel Xeon 2.66 GHz - Processor Count: 4 - Memory: 24 GB - ruby 1.8.7 (2011-02-18 patchlevel 334) [i686-darwin10.6.0], MBARI 0x6770, Ruby Enterprise Edition 2011.03 - -"Simple - Query Plan Reuse" is up to 58% faster over repetitions ------------------------------------------------------------------ - - Simple - Query Plan Reuse 0.20799994468689 secs Fastest - Simple - Dynamic SQL 0.49638819694519 secs 58% Slower - -=end - -require 'rubygems' -require 'bundler' -Bundler.setup -require 'tiny_tds' -require 'bench_press' - -extend BenchPress - -author 'Ken Collins' -summary 'Benchmark simple cached query plan reuse in SQL Server.' 
-reps 500 - -@client = TinyTds::Client.new :host => 'mc2008', :username => 'rails' - - -measure "Simple - Dynamic SQL" do - @client.execute("SELECT TOP(1) * FROM [posts] WHERE [id] = #{rand(1000000)}").do -end - -measure "Simple - Query Plan Reuse" do - @client.execute("EXEC sp_executesql N'SELECT TOP(1) * FROM [posts] WHERE [id] = @0', N'@0 int', @0 = #{rand(1000000)}").do -end - diff --git a/test/schema/datatypes/2012.sql b/test/schema/datatypes/2012.sql new file mode 100644 index 000000000..77b14807d --- /dev/null +++ b/test/schema/datatypes/2012.sql @@ -0,0 +1,56 @@ + +IF EXISTS ( + SELECT TABLE_NAME + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_NAME = N'sst_datatypes' +) DROP TABLE [sst_datatypes] + +CREATE TABLE [sst_datatypes] ( + -- Exact Numerics + [id] [int] NOT NULL IDENTITY(1,1) PRIMARY KEY, + [bigint] [bigint] NULL DEFAULT 42, + [int] [int] NULL DEFAULT 42, + [smallint] [smallint] NULL DEFAULT 42, + [tinyint] [tinyint] NULL DEFAULT 42, + [bit] [bit] NULL DEFAULT 1, + [decimal_9_2] [decimal](9, 2) NULL DEFAULT 12345.01, + [decimal_16_4] [decimal](16, 4) NULL DEFAULT 1234567.89, + [numeric_18_0] [numeric](18, 0) NULL DEFAULT 191, + [numeric_36_2] [numeric](36, 2) NULL DEFAULT 12345678901234567890.01, + [money] [money] NULL DEFAULT 4.20, + [smallmoney] [smallmoney] NULL DEFAULT 4.20, + -- Approximate Numerics + [float] [float] NULL DEFAULT 123.00000001, + [real] [real] NULL DEFAULT 123.45, + -- Date and Time + [date] [date] NULL DEFAULT '0001-01-01', + [datetime] [datetime] NULL DEFAULT '1753-01-01T00:00:00.123', + [datetime2_7] [datetime2](7) NULL DEFAULT '9999-12-31 23:59:59.9999999', + [datetime2_3] [datetime2](3) NULL, + [datetime2_1] [datetime2](1) NULL, + [datetime2_0] [datetime2](0) NULL, + [datetimeoffset_7] [datetimeoffset](7) NULL DEFAULT '1984-01-24 04:20:00.1234567 -08:00', + [datetimeoffset_3] [datetimeoffset](3) NULL, + [datetimeoffset_1] [datetimeoffset](1) NULL, + [smalldatetime] [smalldatetime] NULL DEFAULT '1901-01-01T15:45:00.000Z', + [time_7] [time](7) NULL DEFAULT '04:20:00.2883215', + [time_2] [time](2) NULL, + [time_default] [time] NULL DEFAULT '15:03:42.0621978', + -- Character Strings + [char_10] [char](10) NULL DEFAULT '1234567890', + [varchar_50] [varchar](50) NULL DEFAULT 'test varchar_50', + [varchar_max] [varchar](max) NULL DEFAULT 'test varchar_max', + [text] [text] NULL DEFAULT 'test text', + -- Unicode Character Strings + [nchar_10] [nchar](10) NULL DEFAULT N'12345678åå', + [nvarchar_50] [nvarchar](50) NULL DEFAULT N'test nvarchar_50 åå', + [nvarchar_max] [nvarchar](max) NULL DEFAULT N'test nvarchar_max åå', + [ntext] [ntext] NULL DEFAULT N'test ntext åå', + -- Binary Strings + [binary_49] [binary](49) NULL, + [varbinary_49] [varbinary](49) NULL, + [varbinary_max] [varbinary](max) NULL, + -- Other Data Types + [uniqueidentifier] [uniqueidentifier] NULL DEFAULT NEWID(), + [timestamp] [timestamp] NULL, +) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY] diff --git a/test/schema/enable-in-memory-oltp.sql b/test/schema/enable-in-memory-oltp.sql new file mode 100644 index 000000000..fd5dee1b9 --- /dev/null +++ b/test/schema/enable-in-memory-oltp.sql @@ -0,0 +1,81 @@ +-- https://msdn.microsoft.com/en-us/library/mt694156.aspx +-- https://raw.githubusercontent.com/Microsoft/sql-server-samples/master/samples/features/in-memory/t-sql-scripts/enable-in-memory-oltp.sql +-- +-- The below scipt enables the use of In-Memory OLTP in the current database, +-- provided it is supported in the edition / pricing tier of the database. +-- It does the following: +-- 1. 
Validate that In-Memory OLTP is supported. +-- 2. In SQL Server, it will add a MEMORY_OPTIMIZED_DATA filegroup to the database +-- and create a container within the filegroup in the default data folder. +-- 3. Change the database compatibility level to 130 (needed for parallel queries +-- and auto-update of statistics). +-- 4. Enables the database option MEMORY_OPTIMIZED_ELEVATE_TO_SNAPSHOT to avoid the +-- need to use the WITH (SNAPSHOT) hint for ad hoc queries accessing memory-optimized +-- tables. +-- +-- Applies To: SQL Server 2016 (or higher); Azure SQL Database +-- Author: Jos de Bruijn (Microsoft) +-- Last Updated: 2016-05-02 + +SET NOCOUNT ON; +SET XACT_ABORT ON; + +-- 1. validate that In-Memory OLTP is supported +IF SERVERPROPERTY(N'IsXTPSupported') = 0 +BEGIN + PRINT N'Error: In-Memory OLTP is not supported for this server edition or database pricing tier.'; +END +IF DB_ID() < 5 +BEGIN + PRINT N'Error: In-Memory OLTP is not supported in system databases. Connect to a user database.'; +END +ELSE +BEGIN + BEGIN TRY; +-- 2. add MEMORY_OPTIMIZED_DATA filegroup when not using Azure SQL DB + IF SERVERPROPERTY('EngineEdition') != 5 + BEGIN + DECLARE @SQLDataFolder nvarchar(max) = cast(SERVERPROPERTY('InstanceDefaultDataPath') as nvarchar(max)) + DECLARE @MODName nvarchar(max) = DB_NAME() + N'_mod'; + DECLARE @MemoryOptimizedFilegroupFolder nvarchar(max) = @SQLDataFolder + @MODName; + + DECLARE @SQL nvarchar(max) = N''; + + -- add filegroup + IF NOT EXISTS (SELECT 1 FROM sys.filegroups WHERE type = N'FX') + BEGIN + SET @SQL = N' +ALTER DATABASE CURRENT +ADD FILEGROUP ' + QUOTENAME(@MODName) + N' CONTAINS MEMORY_OPTIMIZED_DATA;'; + EXECUTE (@SQL); + + END; + + -- add container in the filegroup + IF NOT EXISTS (SELECT * FROM sys.database_files WHERE data_space_id IN (SELECT data_space_id FROM sys.filegroups WHERE type = N'FX')) + BEGIN + SET @SQL = N' +ALTER DATABASE CURRENT +ADD FILE (name = N''' + @MODName + ''', filename = ''' + + @MemoryOptimizedFilegroupFolder + N''') +TO FILEGROUP ' + QUOTENAME(@MODName); + EXECUTE (@SQL); + END + END + + -- 3. set compat level to 130 if it is lower + IF (SELECT compatibility_level FROM sys.databases WHERE database_id=DB_ID()) < 130 + ALTER DATABASE CURRENT SET COMPATIBILITY_LEVEL = 130 + + -- 4. enable MEMORY_OPTIMIZED_ELEVATE_TO_SNAPSHOT for the database + ALTER DATABASE CURRENT SET MEMORY_OPTIMIZED_ELEVATE_TO_SNAPSHOT = ON; + + + END TRY + BEGIN CATCH + PRINT N'Error enabling In-Memory OLTP'; + IF XACT_STATE() != 0 + ROLLBACK; + THROW; + END CATCH; +END; diff --git a/test/schema/sqlserver_specific_schema.rb b/test/schema/sqlserver_specific_schema.rb index 37e6150cc..68770d59a 100644 --- a/test/schema/sqlserver_specific_schema.rb +++ b/test/schema/sqlserver_specific_schema.rb @@ -1,230 +1,383 @@ +# frozen_string_literal: true + ActiveRecord::Schema.define do - - create_table :UPPER_TESTS, :force => true do |t| + # Exhaustive Data Types + + execute File.read(ARTest::SQLServer.schema_datatypes_2012_file) + + create_table :sst_datatypes_migration, force: true do |t| + # Simple Rails conventions. + t.integer :integer_col + t.bigint :bigint_col + t.boolean :boolean_col + t.decimal :decimal_col + t.float :float_col + t.string :string_col + t.text :text_col + t.datetime :datetime_nil_precision_col, precision: nil + t.datetime :datetime_col # Precision defaults to 6 + t.timestamp :timestamp_col # Precision defaults to 6 + t.time :time_col + t.date :date_col + t.binary :binary_col + # Our type methods. 
+ t.real :real_col + t.money :money_col + t.smalldatetime :smalldatetime_col + t.datetime2 :datetime2_col + t.datetimeoffset :datetimeoffset + t.smallmoney :smallmoney_col + t.char :char_col + t.varchar :varchar_col + t.text_basic :text_basic_col + t.nchar :nchar_col + t.ntext :ntext_col + t.binary_basic :binary_basic_col + t.binary_basic :binary_basic_16_col, limit: 16 + t.varbinary :varbinary_col + t.uuid :uuid_col + t.ss_timestamp :sstimestamp_col + if supports_json? + t.json :json_col + else + t.text :json_col + end + end + + # Edge Cases + + if ENV["IN_MEMORY_OLTP"] && supports_in_memory_oltp? + create_table "sst_memory", force: true, id: false, + options: "WITH (MEMORY_OPTIMIZED = ON, DURABILITY = SCHEMA_AND_DATA)" do |t| + t.primary_key_nonclustered :id + t.string :name + t.timestamps + end + end + + create_table "sst_bookings", force: true do |t| + t.string :name + t.datetime2 :created_at, null: false + t.datetime2 :updated_at, null: false + end + + create_table "sst_uuids", force: true, id: :uuid do |t| + t.string :name + t.uuid :other_uuid, default: "NEWID()" + t.uuid :uuid_nil_default, default: nil + end + + create_table "sst_my$strange_table", force: true do |t| + t.string :name + end + + create_table :SST_UPPER_TESTS, force: true do |t| t.column :COLUMN1, :string t.column :COLUMN2, :integer end - - create_table :float_data, :force => true do |t| - t.float :temperature - t.float :temperature_8, :limit => 8 - t.float :temperature_24, :limit => 24 - t.float :temperature_32, :limit => 32 - t.float :temperature_53, :limit => 53 - end - - create_table :table_with_real_columns, :force => true do |t| - t.column :real_number, :real - end - - create_table :defaults, :force => true do |t| - t.column :positive_integer, :integer, :default => 1 - t.column :negative_integer, :integer, :default => -1 - t.column :decimal_number, :decimal, :precision => 3, :scale => 2, :default => 2.78 - end - - create_table :string_defaults, :force => true do |t| - t.column :string_with_null_default, :string, :default => nil - t.column :string_with_pretend_null_one, :string, :default => 'null' - t.column :string_with_pretend_null_two, :string, :default => '(null)' - t.column :string_with_pretend_null_three, :string, :default => 'NULL' - t.column :string_with_pretend_null_four, :string, :default => '(NULL)' - t.column :string_with_pretend_paren_three, :string, :default => '(3)' - t.column :string_with_multiline_default, :string, :default => "Some long default with a\nnew line." - end - - create_table :sql_server_chronics, :force => true do |t| - t.column :date, :date - t.column :time, :time - t.column :datetime, :datetime - t.column :timestamp, :timestamp - t.column :ss_timestamp, :ss_timestamp unless sqlserver_azure? 
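The schema above exercises the adapter's own column helpers (`t.money`, `t.smalldatetime`, `t.datetimeoffset`, `t.uuid`, `t.ss_timestamp`, and so on) alongside the stock Rails types. As a hedged aside that is not part of this patch, an application schema could use the same helpers; the table and column names below are invented, and the SQL Server types noted in the comments are inferred from the `sst_datatypes` definitions earlier in this diff.

```ruby
# Illustrative only -- not part of the test schema.
ActiveRecord::Schema.define do
  create_table :invoices, force: true do |t|
    t.money          :total          # money
    t.smalldatetime  :approved_at    # smalldatetime
    t.datetimeoffset :due_at         # datetimeoffset
    t.uuid           :external_id    # uniqueidentifier
    t.ss_timestamp   :row_version    # timestamp (rowversion)
  end
end
```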
- t.column :smalldatetime, :smalldatetime - end - - create_table(:fk_test_has_fks, :force => true) { |t| t.column(:fk_id, :integer, :null => false) } - create_table(:fk_test_has_pks, :force => true) { } - execute <<-ADDFKSQL - ALTER TABLE fk_test_has_fks - ADD CONSTRAINT FK__fk_test_has_fk_fk_id - FOREIGN KEY (#{quote_column_name('fk_id')}) - REFERENCES #{quote_table_name('fk_test_has_pks')} (#{quote_column_name('id')}) - ADDFKSQL - - create_table :sql_server_unicodes, :force => true do |t| - t.column :nchar, :nchar - t.column :nvarchar, :nvarchar - t.column :ntext, :ntext - t.column :ntext_10, :ntext, :limit => 10 - t.column :nchar_10, :nchar, :limit => 10 - t.column :nvarchar_100, :nvarchar, :limit => 100 - t.column :nvarchar_max, :nvarchar_max - t.column :nvarchar_max_10, :nvarchar_max, :limit => 10 - end - - create_table :sql_server_strings, :force => true do |t| - t.column :char, :char - t.column :char_10, :char, :limit => 10 - t.column :varchar_max, :varchar_max - t.column :varchar_max_10, :varchar_max, :limit => 10 - end - - create_table :sql_server_binary_types, :force => true do |t| - # TODO: Add some different native binary types and test. - end - - create_table 'my$strange_table', :force => true do |t| - t.column :number, :real - end - - create_table :sql_server_edge_schemas, :force => true do |t| - t.string :description - t.column :bigint, :bigint - t.column :tinyint, :tinyint - t.column :guid, :uniqueidentifier - t.column 'crazy]]quote', :string - t.column 'with spaces', :string - end - execute %|ALTER TABLE [sql_server_edge_schemas] ADD [guid_newid] uniqueidentifier DEFAULT NEWID();| - execute %|ALTER TABLE [sql_server_edge_schemas] ADD [guid_newseqid] uniqueidentifier DEFAULT NEWSEQUENTIALID();| unless sqlserver_azure? - - create_table :no_pk_data, :force => true, :id => false do |t| + + create_table :sst_no_pk_data, force: true, id: false do |t| t.string :name end - - # http://blogs.msdn.com/b/craigfr/archive/2008/03/19/ranking-functions-row-number.aspx - execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'order_row_number') DROP TABLE order_row_number" - execute <<-ORDERROWNUMBERSQL - CREATE TABLE [order_row_number] (id int IDENTITY, a int, b int, c int) - CREATE UNIQUE CLUSTERED INDEX [idx_order_row_number_id] ON [order_row_number] ([id]) - INSERT [order_row_number] VALUES (0, 1, 8) - INSERT [order_row_number] VALUES (0, 3, 6) - INSERT [order_row_number] VALUES (0, 5, 4) - INSERT [order_row_number] VALUES (0, 7, 2) - INSERT [order_row_number] VALUES (0, 9, 0) - INSERT [order_row_number] VALUES (1, 0, 9) - INSERT [order_row_number] VALUES (1, 2, 7) - INSERT [order_row_number] VALUES (1, 4, 5) - INSERT [order_row_number] VALUES (1, 6, 3) - INSERT [order_row_number] VALUES (1, 8, 1) - ORDERROWNUMBERSQL - - execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'natural_pk_data') DROP TABLE natural_pk_data" + + create_table "sst_quoted-table", force: true do |t| + end + execute "IF EXISTS (SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_NAME = 'sst_quoted-view1') DROP VIEW [sst_quoted-view1]" + execute "CREATE VIEW [sst_quoted-view1] AS SELECT * FROM [sst_quoted-table]" + execute "IF EXISTS (SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_NAME = 'sst_quoted-view2') DROP VIEW [sst_quoted-view2]" + execute "CREATE VIEW [sst_quoted-view2] AS \n /*#{"x" * 4000}}*/ \n SELECT * FROM [sst_quoted-table]" + + create_table :sst_string_defaults, force: true do |t| + t.column :string_with_null_default, :string, 
default: nil + t.column :string_with_pretend_null_one, :string, default: "null" + t.column :string_with_pretend_null_two, :string, default: "(null)" + t.column :string_with_pretend_null_three, :string, default: "NULL" + t.column :string_with_pretend_null_four, :string, default: "(NULL)" + t.column :string_with_pretend_paren_three, :string, default: "(3)" + t.column :string_with_multiline_default, :string, default: "Some long default with a\nnew line." + end + + create_table :sst_string_collation, collation: :SQL_Latin1_General_CP1_CI_AS, force: true do |t| + t.string :string_without_collation + t.varchar :string_default_collation, collation: :SQL_Latin1_General_CP1_CI_AS + t.varchar :string_with_collation, collation: :SQL_Latin1_General_CP1_CS_AS + t.varchar :varchar_with_collation, collation: :SQL_Latin1_General_CP1_CS_AS + end + + create_table :sst_edge_schemas, force: true do |t| + t.string :description + t.column "crazy]]quote", :string + t.column "with spaces", :string + end + + execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'sst_natural_pk_data') DROP TABLE sst_natural_pk_data" execute <<-NATURALPKTABLESQL - CREATE TABLE natural_pk_data( - parent_id int, - name nvarchar(255), - description nvarchar(1000), - legacy_id nvarchar(10) NOT NULL PRIMARY KEY + CREATE TABLE sst_natural_pk_data( + parent_id int, + name nvarchar(255), + description nvarchar(1000), + legacy_id nvarchar(10) NOT NULL PRIMARY KEY ) NATURALPKTABLESQL - - execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'natural_pk_int_data') DROP TABLE natural_pk_int_data" + + execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'sst_natural_pk_int_data') DROP TABLE sst_natural_pk_int_data" execute <<-NATURALPKINTTABLESQL - CREATE TABLE natural_pk_int_data( + CREATE TABLE sst_natural_pk_int_data( legacy_id int NOT NULL PRIMARY KEY, parent_id int, name nvarchar(255), description nvarchar(1000) ) NATURALPKINTTABLESQL - - execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'tinyint_pk_table') DROP TABLE tinyint_pk_table" + + execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'sst_tinyint_pk') DROP TABLE sst_tinyint_pk" execute <<-TINYITPKTABLE - CREATE TABLE tinyint_pk_table( - id tinyint NOT NULL PRIMARY KEY, + CREATE TABLE sst_tinyint_pk( + id tinyint IDENTITY NOT NULL PRIMARY KEY, name nvarchar(255) ) TINYITPKTABLE - - create_table 'quoted-table', :force => true do |t| - end - execute "IF EXISTS (SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_NAME = 'quoted-view1') DROP VIEW [quoted-view1]" - execute "CREATE VIEW [quoted-view1] AS SELECT * FROM [quoted-table]" - execute "IF EXISTS (SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_NAME = 'quoted-view2') DROP VIEW [quoted-view2]" - execute "CREATE VIEW [quoted-view2] AS \n /*#{'x'*4000}}*/ \n SELECT * FROM [quoted-table]" - - execute "IF EXISTS (SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_NAME = 'customers_view') DROP VIEW customers_view" + + begin + execute "DROP DEFAULT [sst_getdateobject];" + rescue + nil + end + begin + execute "CREATE DEFAULT [sst_getdateobject] AS getdate();" + rescue + nil + end + create_table "sst_defaultobjects", force: true do |t| + t.string :name + t.date :date + end + execute "sp_bindefault 'sst_getdateobject', 'sst_defaultobjects.date'" + + begin + execute "DROP PROCEDURE my_getutcdate" + rescue + nil + end + execute <<-SQL + CREATE PROCEDURE my_getutcdate AS + SELECT GETUTCDATE() 
utcdate + SQL + + create_table "A Table With Spaces", force: true do |t| + t.string :name + end + + # Constraints + + create_table(:sst_has_fks, force: true) do |t| + t.column(:fk_id, :bigint, null: false) + t.column(:fk_id2, :bigint) + end + + create_table(:sst_has_pks, force: true) {} + execute <<-ADDFKSQL + ALTER TABLE sst_has_fks + ADD CONSTRAINT FK__sst_has_fks_id + FOREIGN KEY ([fk_id]) + REFERENCES [sst_has_pks] ([id]), + + CONSTRAINT FK__sst_has_fks_id2 + FOREIGN KEY ([fk_id2]) + REFERENCES [sst_has_pks] ([id]) + ADDFKSQL + + # Views + + execute "IF EXISTS (SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_NAME = 'sst_customers_view') DROP VIEW sst_customers_view" execute <<-CUSTOMERSVIEW - CREATE VIEW customers_view AS + CREATE VIEW sst_customers_view AS SELECT id, name, balance FROM customers CUSTOMERSVIEW - execute "IF EXISTS (SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_NAME = 'string_defaults_view') DROP VIEW string_defaults_view" + execute "IF EXISTS (SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_NAME = 'sst_string_defaults_view') DROP VIEW sst_string_defaults_view" execute <<-STRINGDEFAULTSVIEW - CREATE VIEW string_defaults_view AS + CREATE VIEW sst_string_defaults_view AS SELECT id, string_with_pretend_null_one as pretend_null - FROM string_defaults + FROM sst_string_defaults STRINGDEFAULTSVIEW - - execute "IF EXISTS (SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_NAME = 'string_defaults_big_view') DROP VIEW string_defaults_big_view" + + execute "IF EXISTS (SELECT TABLE_NAME FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_NAME = 'sst_string_defaults_big_view') DROP VIEW sst_string_defaults_big_view" execute <<-STRINGDEFAULTSBIGVIEW - CREATE VIEW string_defaults_big_view AS + CREATE VIEW sst_string_defaults_big_view AS SELECT id, string_with_pretend_null_one as pretend_null - /*#{'x'*4000}}*/ - FROM string_defaults + /*#{"x" * 4000}}*/ + FROM sst_string_defaults STRINGDEFAULTSBIGVIEW + # Trigger + + execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'sst_table_with_trigger') DROP TABLE sst_table_with_trigger" + execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'sst_table_with_trigger_history') DROP TABLE sst_table_with_trigger_history" + execute <<-SQL + CREATE TABLE sst_table_with_trigger( + id bigint IDENTITY NOT NULL PRIMARY KEY, + event_name nvarchar(255) + ) + CREATE TABLE sst_table_with_trigger_history( + id bigint IDENTITY NOT NULL PRIMARY KEY, + id_source nvarchar(36), + event_name nvarchar(255) + ) + SQL + execute <<-SQL + CREATE TRIGGER sst_table_with_trigger_t ON sst_table_with_trigger + FOR INSERT + AS + INSERT INTO sst_table_with_trigger_history (id_source, event_name) + SELECT id AS id_source, event_name FROM INSERTED + SQL + + execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'sst_table_with_uuid_trigger') DROP TABLE sst_table_with_uuid_trigger" + execute <<-SQL + CREATE TABLE sst_table_with_uuid_trigger( + id uniqueidentifier DEFAULT NEWID() PRIMARY KEY, + event_name nvarchar(255) + ) + SQL + execute <<-SQL + CREATE TRIGGER sst_table_with_uuid_trigger_t ON sst_table_with_uuid_trigger + FOR INSERT + AS + INSERT INTO sst_table_with_trigger_history (id_source, event_name) + SELECT id AS id_source, event_name FROM INSERTED + SQL + + execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'sst_table_with_composite_pk_trigger') DROP TABLE sst_table_with_composite_pk_trigger" + execute <<-SQL + CREATE TABLE 
sst_table_with_composite_pk_trigger( + pk_col_one int NOT NULL, + pk_col_two int NOT NULL, + event_name nvarchar(255), + CONSTRAINT PK_sst_table_with_composite_pk_trigger PRIMARY KEY (pk_col_one, pk_col_two) + ) + SQL + execute <<-SQL + CREATE TRIGGER sst_table_with_composite_pk_trigger_t ON sst_table_with_composite_pk_trigger + FOR INSERT + AS + INSERT INTO sst_table_with_trigger_history (id_source, event_name) + SELECT pk_col_one AS id_source, event_name FROM INSERTED + SQL + + execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'sst_table_with_composite_pk_trigger_with_different_data_type') DROP TABLE sst_table_with_composite_pk_trigger_with_different_data_type" + execute <<-SQL + CREATE TABLE sst_table_with_composite_pk_trigger_with_different_data_type( + pk_col_one uniqueidentifier DEFAULT NEWID(), + pk_col_two int NOT NULL, + event_name nvarchar(255), + CONSTRAINT PK_sst_table_with_composite_pk_trigger_with_different_data_type PRIMARY KEY (pk_col_one, pk_col_two) + ) + SQL + execute <<-SQL + CREATE TRIGGER sst_table_with_composite_pk_trigger_with_different_data_type_t ON sst_table_with_composite_pk_trigger_with_different_data_type + FOR INSERT + AS + INSERT INTO sst_table_with_trigger_history (id_source, event_name) + SELECT pk_col_one AS id_source, event_name FROM INSERTED + SQL # Another schema. - - create_table :sql_server_schema_columns, :force => true do |t| - t.column :field1 , :integer + + create_table :sst_schema_columns, force: true do |t| + t.column :field1, :integer end - + execute "IF NOT EXISTS(SELECT * FROM sys.schemas WHERE name = 'test') EXEC sp_executesql N'CREATE SCHEMA test'" - execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'sql_server_schema_columns' and TABLE_SCHEMA = 'test') DROP TABLE test.sql_server_schema_columns" + execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'sst_schema_columns' and TABLE_SCHEMA = 'test') DROP TABLE test.sst_schema_columns" execute <<-SIMILIARTABLEINOTHERSCHEMA - CREATE TABLE test.sql_server_schema_columns( - id int IDENTITY NOT NULL primary key, - filed_1 int, - field_2 int, - name varchar(255), - description varchar(1000), - n_name nvarchar(255), - n_description nvarchar(1000) + CREATE TABLE test.sst_schema_columns( + id int IDENTITY NOT NULL primary key, + filed_1 int, + field_2 int, + name varchar(255), + description varchar(1000), + n_name nvarchar(255), + n_description nvarchar(1000) ) SIMILIARTABLEINOTHERSCHEMA - - execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'sql_server_schema_identity' and TABLE_SCHEMA = 'test') DROP TABLE test.sql_server_schema_identity" + + execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'sst_schema_identity' and TABLE_SCHEMA = 'test') DROP TABLE test.sst_schema_identity" execute <<-SIMILIARTABLEINOTHERSCHEMA - CREATE TABLE test.sql_server_schema_identity( - id int IDENTITY NOT NULL primary key, - filed_1 int + CREATE TABLE test.sst_schema_identity( + id int IDENTITY NOT NULL primary key, + filed_1 int ) SIMILIARTABLEINOTHERSCHEMA - - execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'sql_server_schema_natural_id' and TABLE_SCHEMA = 'test') DROP TABLE test.sql_server_schema_natural_id" + + execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'sst_schema_natural_id' and TABLE_SCHEMA = 'test') DROP TABLE test.sst_schema_natural_id" execute <<-NATURALPKTABLESQLINOTHERSCHEMA - CREATE TABLE 
test.sql_server_schema_natural_id( - parent_id int, - name nvarchar(255), - description nvarchar(1000), - legacy_id nvarchar(10) NOT NULL PRIMARY KEY, + CREATE TABLE test.sst_schema_natural_id( + parent_id int, + name nvarchar(255), + description nvarchar(1000), + legacy_id nvarchar(10) NOT NULL PRIMARY KEY, ) NATURALPKTABLESQLINOTHERSCHEMA - - - # Azure needs clustered indexes - if sqlserver_azure? - execute "IF NOT EXISTS (SELECT [name] FROM [sys].[indexes] WHERE [name] = N'idx_schema_migrations_version') CREATE CLUSTERED INDEX [idx_schema_migrations_version] ON [schema_migrations] ([version])" - execute "IF NOT EXISTS (SELECT [name] FROM [sys].[indexes] WHERE [name] = N'idx_countries_ctryid') CREATE CLUSTERED INDEX [idx_countries_ctryid] ON [countries] ([country_id])" - execute "IF NOT EXISTS (SELECT [name] FROM [sys].[indexes] WHERE [name] = N'idx_treaty_id_trtyid') CREATE CLUSTERED INDEX [idx_treaty_id_trtyid] ON [treaties] ([treaty_id])" - execute "IF NOT EXISTS (SELECT [name] FROM [sys].[indexes] WHERE [name] = N'idx_no_pk_data_name') CREATE CLUSTERED INDEX [idx_no_pk_data_name] ON [no_pk_data] ([name])" - execute "IF NOT EXISTS (SELECT [name] FROM [sys].[indexes] WHERE [name] = N'idx_developers_projects_did_pid') CREATE CLUSTERED INDEX [idx_developers_projects_did_pid] ON [developers_projects] ([developer_id],[project_id])" - execute "IF NOT EXISTS (SELECT [name] FROM [sys].[indexes] WHERE [name] = N'idx_categories_posts_cid_pid') CREATE CLUSTERED INDEX [idx_categories_posts_cid_pid] ON [categories_posts] ([category_id],[post_id])" - execute "IF NOT EXISTS (SELECT [name] FROM [sys].[indexes] WHERE [name] = N'idx_dashboards_dashboard_id') CREATE CLUSTERED INDEX [idx_dashboards_dashboard_id] ON [dashboards] ([dashboard_id])" - execute "IF NOT EXISTS (SELECT [name] FROM [sys].[indexes] WHERE [name] = N'idx_edges_source_id_sink_id') CREATE CLUSTERED INDEX [idx_edges_source_id_sink_id] ON [edges] ([source_id],[sink_id])" - execute "IF NOT EXISTS (SELECT [name] FROM [sys].[indexes] WHERE [name] = N'idx_goofy_string_id_id') CREATE CLUSTERED INDEX [idx_goofy_string_id_id] ON [goofy_string_id] ([id])" - execute "IF NOT EXISTS (SELECT [name] FROM [sys].[indexes] WHERE [name] = N'idx_lessons_students_lid_sid') CREATE CLUSTERED INDEX [idx_lessons_students_lid_sid] ON [lessons_students] ([lesson_id],[student_id])" - execute "IF NOT EXISTS (SELECT [name] FROM [sys].[indexes] WHERE [name] = N'idx_mateys_pid_tid') CREATE CLUSTERED INDEX [idx_mateys_pid_tid] ON [mateys] ([pirate_id],[target_id])" - execute "IF NOT EXISTS (SELECT [name] FROM [sys].[indexes] WHERE [name] = N'idx_minivans_minivan_id') CREATE CLUSTERED INDEX [idx_minivans_minivan_id] ON [minivans] ([minivan_id])" - execute "IF NOT EXISTS (SELECT [name] FROM [sys].[indexes] WHERE [name] = N'idx_parrots_pirates_paid_pid') CREATE CLUSTERED INDEX [idx_parrots_pirates_paid_pid] ON [parrots_pirates] ([parrot_id],[pirate_id])" - execute "IF NOT EXISTS (SELECT [name] FROM [sys].[indexes] WHERE [name] = N'idx_parrots_treasures_pid_tid') CREATE CLUSTERED INDEX [idx_parrots_treasures_pid_tid] ON [parrots_treasures] ([parrot_id],[treasure_id])" - execute "IF NOT EXISTS (SELECT [name] FROM [sys].[indexes] WHERE [name] = N'idx_speedometers_speedometer_id') CREATE CLUSTERED INDEX [idx_speedometers_speedometer_id] ON [speedometers] ([speedometer_id])" - execute "IF NOT EXISTS (SELECT [name] FROM [sys].[indexes] WHERE [name] = N'idx_subscribers_nick') CREATE CLUSTERED INDEX [idx_subscribers_nick] ON [subscribers] ([nick])" - execute "IF NOT EXISTS 
(SELECT [name] FROM [sys].[indexes] WHERE [name] = N'idx_countries_treaties_cid_tid') CREATE CLUSTERED INDEX [idx_countries_treaties_cid_tid] ON [countries_treaties] ([country_id],[treaty_id])" - end - + + execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'sst_schema_test_multiple_schema' and TABLE_SCHEMA = 'test') DROP TABLE test.sst_schema_test_multiple_schema" + execute <<-SCHEMATESTMULTIPLESCHEMA + CREATE TABLE test.sst_schema_test_multiple_schema( + field_1 int NOT NULL PRIMARY KEY, + field_2 int, + ) + SCHEMATESTMULTIPLESCHEMA + execute "IF NOT EXISTS(SELECT * FROM sys.schemas WHERE name = 'test2') EXEC sp_executesql N'CREATE SCHEMA test2'" + execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'sst_schema_test_multiple_schema' and TABLE_SCHEMA = 'test2') DROP TABLE test2.sst_schema_test_multiple_schema" + execute <<-SCHEMATESTMULTIPLESCHEMA + CREATE TABLE test2.sst_schema_test_multiple_schema( + field_1 int, + field_2 int NOT NULL PRIMARY KEY, + ) + SCHEMATESTMULTIPLESCHEMA + + execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'unique_key_dumped_table') DROP TABLE unique_key_dumped_table" + execute <<-SQLSERVERUNIQUEKEYS + CREATE TABLE unique_key_dumped_table ( + id int IDENTITY(1,1) NOT NULL, + unique_field int DEFAULT 0 NOT NULL, + CONSTRAINT IX_UNIQUE_KEY UNIQUE (unique_field), + CONSTRAINT PK_UNIQUE_KEY PRIMARY KEY (id) + ); + SQLSERVERUNIQUEKEYS + + execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'sst_composite_without_identity') DROP TABLE sst_composite_without_identity" + execute <<-COMPOSITE_WITHOUT_IDENTITY + CREATE TABLE sst_composite_without_identity ( + pk_col_one int NOT NULL, + pk_col_two int NOT NULL, + CONSTRAINT PK_sst_composite_without_identity PRIMARY KEY (pk_col_one, pk_col_two) + ); + COMPOSITE_WITHOUT_IDENTITY + + execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'sst_composite_with_identity') DROP TABLE sst_composite_with_identity" + execute <<-COMPOSITE_WITH_IDENTITY + CREATE TABLE sst_composite_with_identity ( + pk_col_one int IDENTITY NOT NULL, + pk_col_two int NOT NULL, + CONSTRAINT PK_sst_composite_with_identity PRIMARY KEY (pk_col_one, pk_col_two) + ); + COMPOSITE_WITH_IDENTITY + + execute "IF EXISTS(SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'aliens' and TABLE_SCHEMA = 'test') DROP TABLE test.aliens" + execute <<-TABLE_IN_OTHER_SCHEMA_USED_BY_MODEL + CREATE TABLE test.aliens( + id int IDENTITY NOT NULL primary key, + name varchar(255) + ) + TABLE_IN_OTHER_SCHEMA_USED_BY_MODEL + + create_table "recurring_tasks", force: true do |t| + t.string :key + t.integer :priority, default: 0 + + t.datetime2 :created_at + t.datetime2 :updated_at + end end diff --git a/test/support/coerceable_test_sqlserver.rb b/test/support/coerceable_test_sqlserver.rb new file mode 100644 index 000000000..37a175db7 --- /dev/null +++ b/test/support/coerceable_test_sqlserver.rb @@ -0,0 +1,55 @@ +# frozen_string_literal: true + +module ARTest + module SQLServer + module CoerceableTest + extend ActiveSupport::Concern + + included do + cattr_accessor :coerced_tests, instance_accessor: false + self.coerced_tests = [] + end + + module ClassMethods + def coerce_tests!(*methods) + methods.each do |method| + coerced_tests.push(method) + coerced_test_warning(method) + end + end + + def coerce_all_tests! 
+ instance_methods(false).each do |method| + next unless method.to_s.start_with?("test") + + undef_method(method) + end + $stdout.puts "🙉 🙈 🙊 Undefined all tests: #{name}" + end + + private + + def coerced_test_warning(test_to_coerce) + method = if test_to_coerce.is_a?(Regexp) + instance_methods(false).select { |m| m =~ test_to_coerce } + else + test_to_coerce + end + + Array(method).each do |m| + result = if m && method_defined?(m) + alias_method("original_#{test_to_coerce.inspect.tr('/\:"', "")}", m) + undef_method(m) + end + + if result.blank? + $stdout.puts "🐳 Unfound coerced test: #{name}##{m}" + else + $stdout.puts "🐵 Undefined coerced test: #{name}##{m}" + end + end + end + end + end + end +end diff --git a/test/support/connection_reflection.rb b/test/support/connection_reflection.rb new file mode 100644 index 000000000..d41605ef5 --- /dev/null +++ b/test/support/connection_reflection.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +module ARTest + module SQLServer + module ConnectionReflection + extend ActiveSupport::Concern + + included { extend ConnectionReflection } + + def connection + ActiveRecord::Base.lease_connection + end + + def connection_options + connection.instance_variable_get :@connection_parameters + end + + def connection_sqlserver_azure? + connection.sqlserver_azure? + end + end + end +end diff --git a/test/support/core_ext/backtrace_cleaner.rb b/test/support/core_ext/backtrace_cleaner.rb new file mode 100644 index 000000000..54e2b3b8e --- /dev/null +++ b/test/support/core_ext/backtrace_cleaner.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +# Need to handle `ActiveRecord` lines like they are in the source rather than in the Rails gem. +module SQLServer + module BacktraceCleaner + extend ActiveSupport::Concern + + private + + def add_gem_filter + gems_paths = (Gem.path | [Gem.default_dir]).map { |p| Regexp.escape(p) } + return if gems_paths.empty? + + gems_regexp = %r{\A(#{gems_paths.join("|")})/(bundler/)?gems/([^/]+)-([\w.]+)/(.*)} + gems_result = '\3 (\4) \5' + + add_filter do |line| + if line.match?(/activerecord/) + line + else + line.sub(gems_regexp, gems_result) + end + end + end + + def add_gem_silencer + add_silencer do |line| + ActiveSupport::BacktraceCleaner::FORMATTED_GEMS_PATTERN.match?(line) && !/activerecord/.match?(line) + end + end + end +end + +ActiveSupport.on_load(:active_record) do + ActiveSupport::BacktraceCleaner.prepend(SQLServer::BacktraceCleaner) +end diff --git a/test/support/core_ext/query_cache.rb b/test/support/core_ext/query_cache.rb new file mode 100644 index 000000000..0679c42aa --- /dev/null +++ b/test/support/core_ext/query_cache.rb @@ -0,0 +1,38 @@ +# frozen_string_literal: true + +require "active_record/connection_adapters/sqlserver_adapter" + +module SqlIgnoredCache + extend ActiveSupport::Concern + + IGNORED_SQL = [ + /INFORMATION_SCHEMA\.(TABLES|VIEWS|COLUMNS|KEY_COLUMN_USAGE)/im, + /sys.columns/i, + /SELECT @@version/, + /SELECT @@TRANCOUNT/, + /(BEGIN|COMMIT|ROLLBACK|SAVE) TRANSACTION/, + /SELECT CAST\(.* AS .*\) AS value/, + /SELECT DATABASEPROPERTYEX/im + ] + + # We don't want to coerce every ActiveRecord test that relies on `query_cache` + # just because we do more queries than the other adapters. + # + # Removing internal queries from the cache will make AR tests pass without + # compromising cache outside tests. 
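As a rough illustration that is not part of the patch, the `IGNORED_SQL` patterns above match the adapter's internal housekeeping statements but leave ordinary application SQL alone; only the matching cache entries are evicted by the `cache_sql` override that follows.

```ruby
patterns = Regexp.union(SqlIgnoredCache::IGNORED_SQL)

patterns.match?("SELECT @@version")              # => true  (internal, evicted from the cache)
patterns.match?("BEGIN TRANSACTION")             # => true  (internal, evicted from the cache)
patterns.match?("SELECT [posts].* FROM [posts]") # => false (application SQL, stays cached)
```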
+ def cache_sql(sql, name, binds) + result = super + + @query_cache.instance_variable_get(:@map).delete_if do |cache_key, _v| + # Query cache key generated by `sql` or `[sql, binds]`, so need to retrieve `sql` for both cases. + cache_key_sql = Array(cache_key).first + Regexp.union(IGNORED_SQL).match?(cache_key_sql) + end + + result + end +end + +ActiveSupport.on_load(:active_record) do + ActiveRecord::ConnectionAdapters::SQLServerAdapter.prepend(SqlIgnoredCache) +end diff --git a/test/support/load_schema_sqlserver.rb b/test/support/load_schema_sqlserver.rb new file mode 100644 index 000000000..73ff9219c --- /dev/null +++ b/test/support/load_schema_sqlserver.rb @@ -0,0 +1,29 @@ +# frozen_string_literal: true + +module ARTest + module SQLServer + extend self + + def schema_root + File.join ARTest::SQLServer.test_root_sqlserver, "schema" + end + + def schema_file + File.join schema_root, "sqlserver_specific_schema.rb" + end + + def schema_datatypes_2012_file + File.join schema_root, "datatypes", "2012.sql" + end + + def load_schema + original_stdout = $stdout + $stdout = StringIO.new + load schema_file + ensure + $stdout = original_stdout + end + end +end + +ARTest::SQLServer.load_schema diff --git a/test/support/marshal_compatibility_fixtures/SQLServer/rails_6_1_topic.dump b/test/support/marshal_compatibility_fixtures/SQLServer/rails_6_1_topic.dump new file mode 100644 index 000000000..56f191de9 Binary files /dev/null and b/test/support/marshal_compatibility_fixtures/SQLServer/rails_6_1_topic.dump differ diff --git a/test/support/marshal_compatibility_fixtures/SQLServer/rails_6_1_topic_associations.dump b/test/support/marshal_compatibility_fixtures/SQLServer/rails_6_1_topic_associations.dump new file mode 100644 index 000000000..9278beeee Binary files /dev/null and b/test/support/marshal_compatibility_fixtures/SQLServer/rails_6_1_topic_associations.dump differ diff --git a/test/support/marshal_compatibility_fixtures/SQLServer/rails_7_1_topic.dump b/test/support/marshal_compatibility_fixtures/SQLServer/rails_7_1_topic.dump new file mode 100644 index 000000000..e1967b468 Binary files /dev/null and b/test/support/marshal_compatibility_fixtures/SQLServer/rails_7_1_topic.dump differ diff --git a/test/support/marshal_compatibility_fixtures/SQLServer/rails_7_1_topic_associations.dump b/test/support/marshal_compatibility_fixtures/SQLServer/rails_7_1_topic_associations.dump new file mode 100644 index 000000000..a9f8888ae Binary files /dev/null and b/test/support/marshal_compatibility_fixtures/SQLServer/rails_7_1_topic_associations.dump differ diff --git a/test/support/minitest_sqlserver.rb b/test/support/minitest_sqlserver.rb new file mode 100644 index 000000000..3fb0c9ed0 --- /dev/null +++ b/test/support/minitest_sqlserver.rb @@ -0,0 +1,3 @@ +# frozen_string_literal: true + +require "minitest-spec-rails/init/active_support" diff --git a/test/support/paths_sqlserver.rb b/test/support/paths_sqlserver.rb new file mode 100644 index 000000000..5d2735d6a --- /dev/null +++ b/test/support/paths_sqlserver.rb @@ -0,0 +1,50 @@ +# frozen_string_literal: true + +module ARTest + module SQLServer + extend self + + def root_sqlserver + File.expand_path File.join(File.dirname(__FILE__), "..", "..") + end + + def test_root_sqlserver + File.join root_sqlserver, "test" + end + + def root_activerecord + File.join Gem.loaded_specs["rails"].full_gem_path, "activerecord" + end + + def root_activerecord_lib + File.join root_activerecord, "lib" + end + + def root_activerecord_test + File.join root_activerecord, "test" + end 
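For orientation only, the path helpers above resolve roughly as follows; the concrete paths in the comments are invented examples, not output produced by the patch.

```ruby
ARTest::SQLServer.root_sqlserver      # e.g. "/home/dev/activerecord-sqlserver-adapter"
ARTest::SQLServer.test_root_sqlserver # e.g. "/home/dev/activerecord-sqlserver-adapter/test"
ARTest::SQLServer.root_activerecord   # Gem.loaded_specs["rails"].full_gem_path joined with "activerecord"
```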
+ + def test_load_paths + ["lib", "test", root_activerecord_lib, root_activerecord_test] + end + + def add_to_load_paths! + test_load_paths.each { |p| $LOAD_PATH.unshift(p) unless $LOAD_PATH.include?(p) } + end + + def migrations_root + File.join test_root_sqlserver, "migrations" + end + + def arconfig_file + File.join test_root_sqlserver, "config.yml" + end + + def arconfig_file_env! + ENV["ARCONFIG"] = arconfig_file + end + end +end + +ARTest::SQLServer.add_to_load_paths! +ARTest::SQLServer.arconfig_file_env! diff --git a/test/support/query_assertions.rb b/test/support/query_assertions.rb new file mode 100644 index 000000000..78dfce02d --- /dev/null +++ b/test/support/query_assertions.rb @@ -0,0 +1,71 @@ +module ARTest + module SQLServer + module QueryAssertions + def assert_queries_count(count = nil, include_schema: false, &block) + ActiveRecord::Base.lease_connection.materialize_transactions + + counter = ActiveRecord::Assertions::QueryAssertions::SQLCounter.new + ActiveSupport::Notifications.subscribed(counter, "sql.active_record") do + result = _assert_nothing_raised_or_warn("assert_queries_count", &block) + queries = include_schema ? counter.log_all : counter.log + + # Start of monkey-patch + queries = include_release_savepoint_placeholder_queries(queries) + # End of monkey-patch + + if count + assert_equal count, queries.size, "#{queries.size} instead of #{count} queries were executed. Queries: #{queries.join("\n\n")}" + else + assert_operator queries.size, :>=, 1, "1 or more queries expected, but none were executed.#{"\nQueries:\n#{queries.join("\n")}" unless queries.empty?}" + end + result + end + end + + def assert_queries_and_values_match(match, bound_values = [], count: nil, &block) + ActiveRecord::Base.lease_connection.materialize_transactions + + counter = ActiveRecord::Assertions::QueryAssertions::SQLCounter.new + ActiveSupport::Notifications.subscribed(counter, "sql.active_record") do + result = _assert_nothing_raised_or_warn("assert_queries_match", &block) + queries = counter.log_full + matched_queries = queries.select do |query, values| + values = values.map { |v| v.respond_to?(:quoted) ? v.quoted : v } + match === query && bound_values === values + end + + if count + assert_equal count, matched_queries.size, "#{matched_queries.size} instead of #{count} queries were executed.#{"\nQueries:\n#{counter.log.join("\n")}" unless counter.log.empty?}" + else + assert_operator matched_queries.size, :>=, 1, "1 or more queries expected, but none were executed.#{"\nQueries:\n#{counter.log.join("\n")}" unless counter.log.empty?}" + end + + result + end + end + + private + + # Rails tests expect a save-point to be created and released. SQL Server does not release + # save-points and so the number of queries will be off. This monkey patch adds placeholder queries + # to replace the missing save-point releases. 
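A small worked example, with invented query strings, of what the helper defined next does to a counted query log:

```ruby
queries = [
  "SAVE TRANSACTION active_record_1",
  "INSERT INTO [posts] ([title]) VALUES (N'hello')",
  "COMMIT TRANSACTION"
]

include_release_savepoint_placeholder_queries(queries)
# => ["SAVE TRANSACTION active_record_1",
#     "INSERT INTO [posts] ([title]) VALUES (N'hello')",
#     "COMMIT TRANSACTION",
#     "/* release savepoint placeholder for testing */"]
```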
+ def include_release_savepoint_placeholder_queries(queries) + grouped_queries = [[]] + + queries.each do |query| + if /SAVE TRANSACTION \S+/.match?(query) + grouped_queries << [query] + else + grouped_queries.last << query + end + end + + grouped_queries.each do |group| + group.append "/* release savepoint placeholder for testing */" if /SAVE TRANSACTION \S+/.match?(group.first) + end + + grouped_queries.flatten + end + end + end +end diff --git a/test/support/rake_helpers.rb b/test/support/rake_helpers.rb new file mode 100644 index 000000000..8f0af5376 --- /dev/null +++ b/test/support/rake_helpers.rb @@ -0,0 +1,42 @@ +# frozen_string_literal: true + +SQLSERVER_HELPER = "test/cases/helper_sqlserver.rb" +SQLSERVER_COERCED = "test/cases/coerced_tests.rb" + +def env_ar_test_files + return unless ENV["TEST_FILES_AR"] && !ENV["TEST_FILES_AR"].empty? + + @env_ar_test_files ||= ENV["TEST_FILES_AR"].split(",").map { |file| + File.join ARTest::SQLServer.root_activerecord, file.strip + }.sort +end + +def env_test_files + return unless ENV["TEST_FILES"] && !ENV["TEST_FILES"].empty? + + @env_test_files ||= ENV["TEST_FILES"].split(",").map(&:strip) +end + +def sqlserver_cases + @sqlserver_cases ||= Dir.glob("test/cases/*_test_sqlserver.rb") +end + +def ar_cases + @ar_cases ||= Dir.glob("#{ARTest::SQLServer.root_activerecord}/test/cases/**/*_test.rb").reject { |x| + x.include?("/adapters/") || x.include?("/encryption/performance") + }.sort +end + +def test_files + if env_ar_test_files + [SQLSERVER_HELPER] + env_ar_test_files + elsif env_test_files + env_test_files + elsif ENV["ONLY_SQLSERVER"] + sqlserver_cases + elsif ENV["ONLY_ACTIVERECORD"] + [SQLSERVER_HELPER] + (ar_cases + [SQLSERVER_COERCED]) + else + [SQLSERVER_HELPER] + (ar_cases + [SQLSERVER_COERCED] + sqlserver_cases) + end +end diff --git a/test/support/table_definition_sqlserver.rb b/test/support/table_definition_sqlserver.rb new file mode 100644 index 000000000..7be97923a --- /dev/null +++ b/test/support/table_definition_sqlserver.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +module ActiveRecord + module ConnectionAdapters + module SQLServer + class TableDefinition < ::ActiveRecord::ConnectionAdapters::TableDefinition + # SQL Server supports precision of 38 for decimal columns. In Rails the test schema includes a column + # with a precision of 55. This is a problem for SQL Server 2008. This method will override the default + # decimal method to limit the precision to 38 for the :atoms_in_universe column. + # See https://github.com/rails/rails/pull/51826/files#diff-2a57b61bbf9ee2c23938fc571d403799f68b4b530d65e2cde219a429bbf10af5L876 + def decimal(*names, **options) + throw "This 'decimal' method should only be used in a test environment." unless defined?(ActiveSupport::TestCase) + + names.each do |name| + options_for_name = options.dup + options_for_name[:precision] = 38 if name == :atoms_in_universe && options_for_name[:precision].to_i == 55 + + column(name, :decimal, **options_for_name) + end + end + end + end + end +end diff --git a/test/support/test_in_memory_oltp.rb b/test/support/test_in_memory_oltp.rb new file mode 100644 index 000000000..2b5bb0c19 --- /dev/null +++ b/test/support/test_in_memory_oltp.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +if ENV["IN_MEMORY_OLTP"] + require "config" + require "active_record" + require "support/config" + require "support/connection" + + ARTest.connect + + if ActiveRecord::Base.lease_connection.supports_in_memory_oltp? + puts "Configuring In-Memory OLTP..." 
+ inmem_file = File.join ARTest::SQLServer.test_root_sqlserver, "schema", "enable-in-memory-oltp.sql" + inmem_sql = File.read(inmem_file) + ActiveRecord::Base.lease_connection.execute(inmem_sql) + end +end
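Tying the pieces together (a summary, not new behavior): with `IN_MEMORY_OLTP` set, this file runs the `enable-in-memory-oltp.sql` script shown earlier, and the test schema then defines its memory-optimized table behind the same guard, as in this excerpt from `test/schema/sqlserver_specific_schema.rb` above.

```ruby
if ENV["IN_MEMORY_OLTP"] && supports_in_memory_oltp?
  create_table "sst_memory", force: true, id: false,
    options: "WITH (MEMORY_OPTIMIZED = ON, DURABILITY = SCHEMA_AND_DATA)" do |t|
    t.primary_key_nonclustered :id
    t.string :name
    t.timestamps
  end
end
```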