diff --git a/.docker/clickhouse/cluster/server1_config.xml b/.docker/clickhouse/cluster/server1_config.xml
new file mode 100644
index 00000000..ecebb8c3
--- /dev/null
+++ b/.docker/clickhouse/cluster/server1_config.xml
@@ -0,0 +1,117 @@
+
+
+
+ 8123
+ 9009
+ clickhouse1
+
+ users.xml
+ default
+ default
+
+ 5368709120
+
+ /var/lib/clickhouse/
+ /var/lib/clickhouse/tmp/
+ /var/lib/clickhouse/user_files/
+ /var/lib/clickhouse/access/
+ 3
+
+
+ debug
+ /var/log/clickhouse-server/clickhouse-server.log
+ /var/log/clickhouse-server/clickhouse-server.err.log
+ 1000M
+ 10
+ 1
+
+
+
+
+
+
+ clickhouse1
+ 9000
+
+
+ clickhouse2
+ 9000
+
+
+
+
+
+
+ 9181
+ 1
+ /var/lib/clickhouse/coordination/log
+ /var/lib/clickhouse/coordination/snapshots
+
+
+ 10000
+ 30000
+ trace
+ 10000
+
+
+
+
+ 1
+ clickhouse1
+ 9000
+
+
+ 2
+ clickhouse2
+ 9000
+
+
+
+
+
+
+ clickhouse1
+ 9181
+
+
+ clickhouse2
+ 9181
+
+
+
+
+ test_cluster
+ clickhouse1
+ 1
+
+
+
+ /clickhouse/test_cluster/task_queue/ddl
+
+
+
+ system
+
+ toYYYYMM(event_date)
+ 1000
+
+
+
+
+ Access-Control-Allow-Origin
+ *
+
+
+ Access-Control-Allow-Headers
+ accept, origin, x-requested-with, content-type, authorization
+
+
+ Access-Control-Allow-Methods
+ POST, GET, OPTIONS
+
+
+ Access-Control-Max-Age
+ 86400
+
+
+
diff --git a/.docker/clickhouse/cluster/server2_config.xml b/.docker/clickhouse/cluster/server2_config.xml
new file mode 100644
index 00000000..83d7bbb1
--- /dev/null
+++ b/.docker/clickhouse/cluster/server2_config.xml
@@ -0,0 +1,117 @@
+
+
+
+ 8123
+ 9009
+ clickhouse2
+
+ users.xml
+ default
+ default
+
+ 5368709120
+
+ /var/lib/clickhouse/
+ /var/lib/clickhouse/tmp/
+ /var/lib/clickhouse/user_files/
+ /var/lib/clickhouse/access/
+ 3
+
+
+ debug
+ /var/log/clickhouse-server/clickhouse-server.log
+ /var/log/clickhouse-server/clickhouse-server.err.log
+ 1000M
+ 10
+ 1
+
+
+
+
+
+
+ clickhouse1
+ 9000
+
+
+ clickhouse2
+ 9000
+
+
+
+
+
+
+ 9181
+ 2
+ /var/lib/clickhouse/coordination/log
+ /var/lib/clickhouse/coordination/snapshots
+
+
+ 10000
+ 30000
+ trace
+ 10000
+
+
+
+
+ 1
+ clickhouse1
+ 9000
+
+
+ 2
+ clickhouse2
+ 9000
+
+
+
+
+
+
+ clickhouse1
+ 9181
+
+
+ clickhouse2
+ 9181
+
+
+
+
+ test_cluster
+ clickhouse2
+ 1
+
+
+
+ /clickhouse/test_cluster/task_queue/ddl
+
+
+
+ system
+
+ toYYYYMM(event_date)
+ 1000
+
+
+
+
+ Access-Control-Allow-Origin
+ *
+
+
+ Access-Control-Allow-Headers
+ accept, origin, x-requested-with, content-type, authorization
+
+
+ Access-Control-Allow-Methods
+ POST, GET, OPTIONS
+
+
+ Access-Control-Max-Age
+ 86400
+
+
+
diff --git a/.docker/clickhouse/single/config.xml b/.docker/clickhouse/single/config.xml
new file mode 100644
index 00000000..218229cd
--- /dev/null
+++ b/.docker/clickhouse/single/config.xml
@@ -0,0 +1,54 @@
+
+
+
+ 8123
+ 9000
+
+ users.xml
+ default
+ default
+
+ 5368709120
+
+ /var/lib/clickhouse/
+ /var/lib/clickhouse/tmp/
+ /var/lib/clickhouse/user_files/
+ /var/lib/clickhouse/access/
+ 3
+
+
+ debug
+ /var/log/clickhouse-server/clickhouse-server.log
+ /var/log/clickhouse-server/clickhouse-server.err.log
+ 1000M
+ 10
+ 1
+
+
+
+ system
+
+ toYYYYMM(event_date)
+ 1000
+
+
+
+
+ Access-Control-Allow-Origin
+ *
+
+
+ Access-Control-Allow-Headers
+ accept, origin, x-requested-with, content-type, authorization
+
+
+ Access-Control-Allow-Methods
+ POST, GET, OPTIONS
+
+
+ Access-Control-Max-Age
+ 86400
+
+
+
+
diff --git a/.docker/clickhouse/users.xml b/.docker/clickhouse/users.xml
new file mode 100644
index 00000000..61188536
--- /dev/null
+++ b/.docker/clickhouse/users.xml
@@ -0,0 +1,34 @@
+
+
+
+
+
+ random
+
+
+
+
+
+
+
+ ::/0
+
+ default
+ default
+ 1
+
+
+
+
+
+
+ 3600
+ 0
+ 0
+ 0
+ 0
+ 0
+
+
+
+
diff --git a/.docker/docker-compose.cluster.yml b/.docker/docker-compose.cluster.yml
new file mode 100644
index 00000000..d3ce9996
--- /dev/null
+++ b/.docker/docker-compose.cluster.yml
@@ -0,0 +1,52 @@
+version: '3.5'
+
+services:
+ clickhouse1:
+ image: 'clickhouse/clickhouse-server:${CLICKHOUSE_VERSION-23.11-alpine}'
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ hostname: clickhouse1
+ container_name: clickhouse-activerecord-clickhouse-server-1
+ ports:
+ - '8124:8123'
+ - '9001:9000'
+ volumes:
+ - './clickhouse/cluster/server1_config.xml:/etc/clickhouse-server/config.xml'
+ - './clickhouse/users.xml:/etc/clickhouse-server/users.xml'
+ healthcheck:
+ test: bash -c "exec 6<> /dev/tcp/localhost/8123"
+ interval: 5s
+
+ clickhouse2:
+ image: 'clickhouse/clickhouse-server:${CLICKHOUSE_VERSION-23.11-alpine}'
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ hostname: clickhouse2
+ container_name: clickhouse-activerecord-clickhouse-server-2
+ ports:
+ - '8125:8123'
+ volumes:
+ - './clickhouse/cluster/server2_config.xml:/etc/clickhouse-server/config.xml'
+ - './clickhouse/users.xml:/etc/clickhouse-server/users.xml'
+ healthcheck:
+ test: bash -c "exec 6<> /dev/tcp/localhost/8123"
+ interval: 5s
+
+ # Using Nginx as a cluster entrypoint and a round-robin load balancer for HTTP requests
+ nginx:
+ image: 'nginx:1.23.1-alpine'
+ hostname: nginx
+ ports:
+ - '28123:8123'
+ volumes:
+ - './nginx/local.conf:/etc/nginx/conf.d/local.conf'
+ container_name: clickhouse-activerecord-nginx
+ depends_on:
+ clickhouse1:
+ condition: service_healthy
+ clickhouse2:
+ condition: service_healthy
diff --git a/.docker/docker-compose.yml b/.docker/docker-compose.yml
new file mode 100644
index 00000000..5e3ce482
--- /dev/null
+++ b/.docker/docker-compose.yml
@@ -0,0 +1,17 @@
+version: '3.8'
+services:
+ clickhouse:
+ image: 'clickhouse/clickhouse-server:${CLICKHOUSE_VERSION-23.11-alpine}'
+ container_name: 'clickhouse-activerecord-clickhouse-server'
+ ports:
+ - '18123:8123'
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ volumes:
+ - './clickhouse/single/config.xml:/etc/clickhouse-server/config.xml'
+ - './clickhouse/users.xml:/etc/clickhouse-server/users.xml'
+ healthcheck:
+ test: bash -c "exec 6<> /dev/tcp/localhost/8123"
+ interval: 5s
diff --git a/.docker/nginx/local.conf b/.docker/nginx/local.conf
new file mode 100644
index 00000000..35fd4512
--- /dev/null
+++ b/.docker/nginx/local.conf
@@ -0,0 +1,12 @@
+upstream clickhouse_cluster {
+ server clickhouse1:8123;
+ server clickhouse2:8123;
+}
+
+server {
+ listen 8123;
+ client_max_body_size 100M;
+ location / {
+ proxy_pass http://clickhouse_cluster;
+ }
+}
diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml
new file mode 100644
index 00000000..6cd96c8f
--- /dev/null
+++ b/.github/workflows/testing.yml
@@ -0,0 +1,77 @@
+name: Testing
+
+on:
+ push:
+ branches: [ "master" ]
+ pull_request:
+ branches: [ "master" ]
+
+jobs:
+ tests_single:
+ name: Testing single server
+ runs-on: ubuntu-latest
+
+ env:
+ CLICKHOUSE_PORT: 18123
+ CLICKHOUSE_DATABASE: default
+
+ strategy:
+ fail-fast: true
+ max-parallel: 1
+ matrix:
+ ruby-version: [ '2.7', '3.0', '3.2' ]
+ clickhouse: [ '22.1' ]
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Start ClickHouse ${{ matrix.clickhouse }}
+ uses: isbang/compose-action@v1.5.1
+ env:
+ CLICKHOUSE_VERSION: ${{ matrix.clickhouse }}
+ with:
+ compose-file: '.docker/docker-compose.yml'
+ down-flags: '--volumes'
+
+ - name: Set up Ruby ${{ matrix.ruby-version }}
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: ${{ matrix.ruby-version }}
+ bundler-cache: true
+
+ - run: bundle exec rspec spec/single
+
+ tests_cluster:
+ name: Testing cluster server
+ runs-on: ubuntu-latest
+
+ env:
+ CLICKHOUSE_PORT: 28123
+ CLICKHOUSE_DATABASE: default
+ CLICKHOUSE_CLUSTER: test_cluster
+
+ strategy:
+ fail-fast: true
+ max-parallel: 1
+ matrix:
+ ruby-version: [ '2.7', '3.0', '3.2' ]
+ clickhouse: [ '22.1' ]
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Start ClickHouse Cluster ${{ matrix.clickhouse }}
+ uses: isbang/compose-action@v1.5.1
+ env:
+ CLICKHOUSE_VERSION: ${{ matrix.clickhouse }}
+ with:
+ compose-file: '.docker/docker-compose.cluster.yml'
+ down-flags: '--volumes'
+
+ - name: Set up Ruby ${{ matrix.ruby-version }}
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: ${{ matrix.ruby-version }}
+ bundler-cache: true
+
+ - run: bundle exec rspec spec/cluster
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3dbfb857..6cdd42c5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,40 @@
+### Version 1.0.7 (Apr 27, 2024)
+
+* Support table indexes
+* Fix non-canonical UUID by [@PauloMiranda98](https://github.com/PauloMiranda98) in (#117)
+* Fix precision loss due to JSON float parsing by [@jenskdsgn](https://github.com/jenskdsgn) in (#129)
+* Support functions by [@felix-dumit](https://github.com/felix-dumit) in (#120)
+* Hotfix/rails71 change column by [@trumenov](https://github.com/trumenov) in (#132)
+* Fix DB tasks
+
+### Version 1.0.5 (Mar 14, 2024)
+
+* GitHub workflows
+* Fix injection internal and schema classes for rails 7
+* Add support for binary string by [@PauloMiranda98](https://github.com/PauloMiranda98) in (#116)
+
+### Version 1.0.4 (Feb 2, 2024)
+
+* Use ILIKE for `model.arel_table[:column]#matches` by [@stympy](https://github.com/stympy) in (#115)
+* Fixed `insert_all` for array column (#71)
+* Register Bool and UUID in type map by [@lukinski](https://github.com/lukinski) in (#110)
+* Refactoring `final` method
+* Support update & delete for clickhouse from version 23.3 and newer (#93)
+
+### Version 1.0.0 (Nov 29, 2023)
+
+ * Full support Rails 7.1+
+ * Full support primary or multiple databases
+
+### Version 0.6.0 (Oct 19, 2023)
+
 * Added `Bool` column type instead of `UInt8` (#78). Supports ClickHouse 22+ database only
+ * Added `final` method (#81) (The `ar_internal_metadata` table needs to be deleted after a gem update)
+ * Added `settings` method (#82)
+ * Fixed convert aggregation type (#92)
 * Fixed raising an error when the database does not exist (#91)
+ * Fixed internal metadata update (#84)
+
### Version 0.5.10 (Jun 22, 2022)
* Fixes to create_table method (#70)
diff --git a/README.md b/README.md
index d4644f57..176e6eba 100644
--- a/README.md
+++ b/README.md
@@ -202,8 +202,8 @@ false`. The default integer is `UInt32`
Example:
-``` ruby
-class CreateDataItems < ActiveRecord::Migration
+```ruby
+class CreateDataItems < ActiveRecord::Migration[7.1]
def change
create_table "data_items", id: false, options: "VersionedCollapsingMergeTree(sign, version) PARTITION BY toYYYYMM(day) ORDER BY category", force: :cascade do |t|
t.date "day", null: false
@@ -212,9 +212,42 @@ class CreateDataItems < ActiveRecord::Migration
t.integer "sign", limit: 1, unsigned: false, default: -> { "CAST(1, 'Int8')" }, null: false
t.integer "version", limit: 8, default: -> { "CAST(toUnixTimestamp(now()), 'UInt64')" }, null: false
end
+
+ create_table "with_index", id: false, options: 'MergeTree PARTITION BY toYYYYMM(date) ORDER BY (date)' do |t|
+ t.integer :int1, null: false
+ t.integer :int2, null: false
+ t.date :date, null: false
+
+ t.index '(int1 * int2, date)', name: 'idx', type: 'minmax', granularity: 3
+ end
+
+ remove_index :some, 'idx'
+
+ add_index :some, 'int1 * int2', name: 'idx2', type: 'set(10)', granularity: 4
+ end
+end
+```
+
+Create table with custom column structure:
+
+```ruby
+class CreateDataItems < ActiveRecord::Migration[7.1]
+ def change
+ create_table "data_items", id: false, options: "MergeTree PARTITION BY toYYYYMM(timestamp) ORDER BY timestamp", force: :cascade do |t|
+ t.column "timestamp", "DateTime('UTC') CODEC(DoubleDelta, LZ4)"
+ end
+ end
+end
+```
+
+Create Buffer table with connection database name:
+
+```ruby
+class CreateDataItems < ActiveRecord::Migration[7.1]
+ def change
+ create_table :some_buffers, as: :some, options: "Buffer(#{connection.database}, some, 1, 10, 60, 100, 10000, 10000000, 100000000)"
end
end
-
```
@@ -246,6 +279,12 @@ After checking out the repo, run `bin/setup` to install dependencies. You can al
To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).
+Testing GitHub Actions:
+
+```bash
+act
+```
+
## Contributing
Bug reports and pull requests are welcome on GitHub at [https://github.com/pnixx/clickhouse-activerecord](https://github.com/pnixx/clickhouse-activerecord). This project is intended to be a safe, welcoming space for collaboration, and contributors are expected to adhere to the [Contributor Covenant](http://contributor-covenant.org) code of conduct.
diff --git a/lib/active_record/connection_adapters/clickhouse/oid/uuid.rb b/lib/active_record/connection_adapters/clickhouse/oid/uuid.rb
index db843cd2..7e99c08c 100644
--- a/lib/active_record/connection_adapters/clickhouse/oid/uuid.rb
+++ b/lib/active_record/connection_adapters/clickhouse/oid/uuid.rb
@@ -6,6 +6,7 @@ module Clickhouse
module OID # :nodoc:
class Uuid < Type::Value # :nodoc:
ACCEPTABLE_UUID = %r{\A(\{)?([a-fA-F0-9]{4}-?){8}(?(1)\}|)\z}
+ CANONICAL_UUID = %r{\A[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}\z}
alias :serialize :deserialize
@@ -13,23 +14,32 @@ def type
:uuid
end
- def changed?(old_value, new_value, _)
+ def changed?(old_value, new_value, _new_value_before_type_cast)
old_value.class != new_value.class ||
- new_value && old_value.casecmp(new_value) != 0
+ new_value != old_value
end
def changed_in_place?(raw_old_value, new_value)
raw_old_value.class != new_value.class ||
- new_value && raw_old_value.casecmp(new_value) != 0
+ new_value != raw_old_value
end
private
def cast_value(value)
- casted = value.to_s
- casted if casted.match?(ACCEPTABLE_UUID)
+ value = value.to_s
+ format_uuid(value) if value.match?(ACCEPTABLE_UUID)
end
- end
+
+ def format_uuid(uuid)
+ if uuid.match?(CANONICAL_UUID)
+ uuid
+ else
+ uuid = uuid.delete("{}-").downcase
+ "#{uuid[..7]}-#{uuid[8..11]}-#{uuid[12..15]}-#{uuid[16..19]}-#{uuid[20..]}"
+ end
+ end
+ end
end
end
end
diff --git a/lib/active_record/connection_adapters/clickhouse/schema_creation.rb b/lib/active_record/connection_adapters/clickhouse/schema_creation.rb
index aa20d7c9..d3b015f9 100644
--- a/lib/active_record/connection_adapters/clickhouse/schema_creation.rb
+++ b/lib/active_record/connection_adapters/clickhouse/schema_creation.rb
@@ -1,4 +1,3 @@
-# frozen_string_literal: true
begin
require "active_record/connection_adapters/deduplicable"
rescue LoadError => e
@@ -89,14 +88,23 @@ def visit_TableDefinition(o)
create_sql = +"CREATE#{table_modifier_in_create(o)} #{o.view ? "VIEW" : "TABLE"} "
create_sql << "IF NOT EXISTS " if o.if_not_exists
create_sql << "#{quote_table_name(o.name)} "
+ add_as_clause!(create_sql, o) if o.as && !o.view
add_to_clause!(create_sql, o) if o.materialized
statements = o.columns.map { |c| accept c }
statements << accept(o.primary_keys) if o.primary_keys
+
+ if supports_indexes_in_create?
+ indexes = o.indexes.map do |expression, options|
+ accept(@conn.add_index_options(o.name, expression, **options))
+ end
+ statements.concat(indexes)
+ end
+
create_sql << "(#{statements.join(', ')})" if statements.present?
# Attach options for only table or materialized view without TO section
add_table_options!(create_sql, o) if !o.view || o.view && o.materialized && !o.to
- add_as_clause!(create_sql, o)
+ add_as_clause!(create_sql, o) if o.as && o.view
create_sql
end
@@ -108,7 +116,7 @@ def table_modifier_in_create(o)
def visit_ChangeColumnDefinition(o)
column = o.column
- column.sql_type = type_to_sql(column.type, column.options)
+ column.sql_type = type_to_sql(column.type, **column.options)
options = column_options(column)
quoted_column_name = quote_column_name(o.name)
@@ -124,6 +132,23 @@ def visit_ChangeColumnDefinition(o)
change_column_sql
end
+ def visit_IndexDefinition(o, create = false)
+ sql = create ? ["ALTER TABLE #{quote_table_name(o.table)} ADD"] : []
+ sql << "INDEX"
+ sql << "IF NOT EXISTS" if o.if_not_exists
+ sql << "IF EXISTS" if o.if_exists
+ sql << "#{quote_column_name(o.name)} (#{o.expression}) TYPE #{o.type}"
+ sql << "GRANULARITY #{o.granularity}" if o.granularity
+ sql << "FIRST #{quote_column_name(o.first)}" if o.first
+ sql << "AFTER #{quote_column_name(o.after)}" if o.after
+
+ sql.join(' ')
+ end
+
+ def visit_CreateIndexDefinition(o)
+ visit_IndexDefinition(o.index, true)
+ end
+
def current_database
ActiveRecord::Base.connection_db_config.database
end
diff --git a/lib/active_record/connection_adapters/clickhouse/schema_definitions.rb b/lib/active_record/connection_adapters/clickhouse/schema_definitions.rb
index 4bdc0564..7f03dda1 100644
--- a/lib/active_record/connection_adapters/clickhouse/schema_definitions.rb
+++ b/lib/active_record/connection_adapters/clickhouse/schema_definitions.rb
@@ -101,6 +101,23 @@ def map(*args, **options)
args.each { |name| column(name, :"Map(#{key_type}, #{value_type})", **options.except(:limit, :key_type, :value_type)) }
end
end
+
+ class IndexDefinition
+ attr_reader :table, :name, :expression, :type, :granularity, :first, :after, :if_exists, :if_not_exists
+
+ def initialize(table, name, expression, type, granularity, first:, after:, if_exists:, if_not_exists:)
+ @table = table
+ @name = name
+ @expression = expression
+ @type = type
+ @granularity = granularity
+ @first = first
+ @after = after
+ @if_exists = if_exists
+ @if_not_exists = if_not_exists
+ end
+
+ end
end
end
end
diff --git a/lib/active_record/connection_adapters/clickhouse/schema_statements.rb b/lib/active_record/connection_adapters/clickhouse/schema_statements.rb
index 8027e8b9..82512155 100644
--- a/lib/active_record/connection_adapters/clickhouse/schema_statements.rb
+++ b/lib/active_record/connection_adapters/clickhouse/schema_statements.rb
@@ -6,6 +6,8 @@ module ActiveRecord
module ConnectionAdapters
module Clickhouse
module SchemaStatements
+ DEFAULT_RESPONSE_FORMAT = 'JSONCompactEachRowWithNamesAndTypes'.freeze
+
def execute(sql, name = nil, settings: {})
do_execute(sql, name, settings: settings)
end
@@ -33,13 +35,20 @@ def exec_insert_all(sql, name)
# @link https://clickhouse.com/docs/en/sql-reference/statements/alter/update
def exec_update(_sql, _name = nil, _binds = [])
do_execute(_sql, _name, format: nil)
- true
+ 0
end
# @link https://clickhouse.com/docs/en/sql-reference/statements/delete
def exec_delete(_sql, _name = nil, _binds = [])
- do_execute(_sql, _name, format: nil)
- true
+ log(_sql, "#{adapter_name} #{_name}") do
+ res = request(_sql)
+ begin
+ data = JSON.parse(res.header['x-clickhouse-summary'])
+ data['result_rows'].to_i
+ rescue JSON::ParserError
+ 0
+ end
+ end
end
def tables(name = nil)
@@ -48,6 +57,16 @@ def tables(name = nil)
result['data'].flatten
end
+ def functions
+ result = do_system_execute("SELECT name FROM system.functions WHERE origin = 'SQLUserDefined'")
+ return [] if result.nil?
+ result['data'].flatten
+ end
+
+ def show_create_function(function)
+ do_execute("SELECT create_query FROM system.functions WHERE origin = 'SQLUserDefined' AND name = '#{function}'", format: nil)
+ end
+
def table_options(table)
sql = show_create_table(table)
{ options: sql.gsub(/^(?:.*?)(?:ENGINE = (.*?))?( AS SELECT .*?)?$/, '\\1').presence, as: sql.match(/^CREATE (?:.*?) AS (SELECT .*?)$/).try(:[], 1) }.compact
@@ -58,25 +77,29 @@ def indexes(table_name, name = nil)
[]
end
+ def add_index_options(table_name, expression, **options)
+ options.assert_valid_keys(:name, :type, :granularity, :first, :after, :if_not_exists, :if_exists)
+
+ validate_index_length!(table_name, options[:name])
+
+ IndexDefinition.new(table_name, options[:name], expression, options[:type], options[:granularity], first: options[:first], after: options[:after], if_not_exists: options[:if_not_exists], if_exists: options[:if_exists])
+ end
+
def data_sources
tables
end
def do_system_execute(sql, name = nil)
log_with_debug(sql, "#{adapter_name} #{name}") do
- res = @connection.post("/?#{@config.to_param}", "#{sql} FORMAT JSONCompact", 'User-Agent' => "Clickhouse ActiveRecord #{ClickhouseActiverecord::VERSION}")
-
- process_response(res)
+ res = request(sql, DEFAULT_RESPONSE_FORMAT)
+ process_response(res, DEFAULT_RESPONSE_FORMAT, sql)
end
end
- def do_execute(sql, name = nil, format: 'JSONCompact', settings: {})
+ def do_execute(sql, name = nil, format: DEFAULT_RESPONSE_FORMAT, settings: {})
log(sql, "#{adapter_name} #{name}") do
- formatted_sql = apply_format(sql, format)
- request_params = @config || {}
- res = @connection.post("/?#{request_params.merge(settings).to_param}", formatted_sql, 'User-Agent' => "Clickhouse ActiveRecord #{ClickhouseActiverecord::VERSION}")
-
- process_response(res)
+ res = request(sql, format, settings)
+ process_response(res, format, sql)
end
end
@@ -96,23 +119,44 @@ def assume_migrated_upto_version(version, migrations_paths = nil)
if (duplicate = inserting.detect { |v| inserting.count(v) > 1 })
raise "Duplicate migration #{duplicate}. Please renumber your migrations to resolve the conflict."
end
- do_execute(insert_versions_sql(inserting), nil, settings: {max_partitions_per_insert_block: [100, inserting.size].max})
+ do_execute(insert_versions_sql(inserting), nil, format: nil, settings: {max_partitions_per_insert_block: [100, inserting.size].max})
+ end
+ end
+
+ # Fix insert_all method
+ # https://github.com/PNixx/clickhouse-activerecord/issues/71#issuecomment-1923244983
+ def with_yaml_fallback(value) # :nodoc:
+ if value.is_a?(Array)
+ value
+ else
+ super
end
end
private
+ # Make HTTP request to ClickHouse server
+ # @param [String] sql
+ # @param [String, nil] format
+ # @param [Hash] settings
+ # @return [Net::HTTPResponse]
+ def request(sql, format = nil, settings = {})
+ formatted_sql = apply_format(sql, format)
+ request_params = @connection_config || {}
+ @connection.post("/?#{request_params.merge(settings).to_param}", formatted_sql, 'User-Agent' => "Clickhouse ActiveRecord #{ClickhouseActiverecord::VERSION}")
+ end
+
def apply_format(sql, format)
format ? "#{sql} FORMAT #{format}" : sql
end
- def process_response(res)
+ def process_response(res, format, sql = nil)
case res.code.to_i
when 200
if res.body.to_s.include?("DB::Exception")
- raise ActiveRecord::ActiveRecordError, "Response code: #{res.code}:\n#{res.body}"
+ raise ActiveRecord::ActiveRecordError, "Response code: #{res.code}:\n#{res.body}#{sql ? "\nQuery: #{sql}" : ''}"
else
- res.body.presence && JSON.parse(res.body)
+ format_body_response(res.body, format)
end
else
case res.body
@@ -158,8 +202,7 @@ def table_structure(table_name)
return data unless data.empty?
- raise ActiveRecord::StatementInvalid,
- "Could not find table '#{table_name}'"
+ raise ActiveRecord::StatementInvalid, "Could not find table '#{table_name}'"
end
alias column_definitions table_structure
@@ -193,6 +236,44 @@ def extract_default_function(default_value, default) # :nodoc:
def has_default_function?(default_value, default) # :nodoc:
!default_value && (%r{\w+\(.*\)} === default)
end
+
+ def format_body_response(body, format)
+ return body if body.blank?
+
+ case format
+ when 'JSONCompact'
+ format_from_json_compact(body)
+ when 'JSONCompactEachRowWithNamesAndTypes'
+ format_from_json_compact_each_row_with_names_and_types(body)
+ else
+ body
+ end
+ end
+
+ def format_from_json_compact(body)
+ parse_json_payload(body)
+ end
+
+ def format_from_json_compact_each_row_with_names_and_types(body)
+ rows = body.split("\n").map { |row| parse_json_payload(row) }
+ names, types, *data = rows
+
+ meta = names.zip(types).map do |name, type|
+ {
+ 'name' => name,
+ 'type' => type
+ }
+ end
+
+ {
+ 'meta' => meta,
+ 'data' => data
+ }
+ end
+
+ def parse_json_payload(payload)
+ JSON.parse(payload, decimal_class: BigDecimal)
+ end
end
end
end
diff --git a/lib/active_record/connection_adapters/clickhouse_adapter.rb b/lib/active_record/connection_adapters/clickhouse_adapter.rb
index d2879af6..8c98cf2c 100644
--- a/lib/active_record/connection_adapters/clickhouse_adapter.rb
+++ b/lib/active_record/connection_adapters/clickhouse_adapter.rb
@@ -17,44 +17,37 @@
require 'openssl'
module ActiveRecord
- module ConnectionHandling # :nodoc:
- def clickhouse_adapdated_class
- ConnectionAdapters::ClickhouseAdapter
- end
+ class Base
+ class << self
+ # Establishes a connection to the database that's used by all Active Record objects
+ def clickhouse_connection(config)
+ config = config.symbolize_keys
+
+ if config[:connection]
+ connection = {
+ connection: config[:connection]
+ }
+ else
+ port = config[:port] || 8123
+ connection = {
+ host: config[:host] || 'localhost',
+ port: port,
+ ssl: config[:ssl].present? ? config[:ssl] : port == 443,
+ sslca: config[:sslca],
+ read_timeout: config[:read_timeout],
+ write_timeout: config[:write_timeout],
+ keep_alive_timeout: config[:keep_alive_timeout]
+ }
+ end
- # Establishes a connection to the database that's used by all Active Record objects
- def clickhouse_connection(config)
- config = config.symbolize_keys
-
- if config[:connection]
- connection = {
- connection: config[:connection]
- }
- else
- port = config[:port] || 8123
- connection = {
- host: config[:host] || 'localhost',
- port: port,
- ssl: config[:ssl].present? ? config[:ssl] : port == 443,
- sslca: config[:sslca],
- read_timeout: config[:read_timeout],
- write_timeout: config[:write_timeout],
- keep_alive_timeout: config[:keep_alive_timeout]
- }
- end
-
- if config.key?(:database)
- database = config[:database]
- else
- raise ArgumentError, 'No database specified. Missing argument: database.'
- end
-
- clickhouse_adapdated_class.new(
- logger,
- connection,
- { user: config[:username], password: config[:password], database: database }.compact,
- config
- )
+ if config.key?(:database)
+ database = config[:database]
+ else
+ raise ArgumentError, 'No database specified. Missing argument: database.'
+ end
+
+ ConnectionAdapters::ClickhouseAdapter.new(logger, connection, config)
+ end
end
end
@@ -81,10 +74,11 @@ def is_view
def is_view=(value)
@is_view = value
end
- #
- # def arel_table # :nodoc:
- # @arel_table ||= Arel::Table.new(table_name, type_caster: type_caster)
- # end
+
+ def _delete_record(constraints)
+ raise ActiveRecord::ActiveRecordError.new('Deleting a row is not possible without a primary key') unless self.primary_key
+ super
+ end
end
end
@@ -160,47 +154,15 @@ class ClickhouseAdapter < AbstractAdapter
include Clickhouse::SchemaStatements
- class << self
- private
-
- def initialize_type_map(m) # :nodoc:
- super
- register_class_with_limit m, %r(String), Type::String
- register_class_with_limit m, 'Date', Clickhouse::OID::Date
- register_class_with_precision m, %r(datetime)i, Clickhouse::OID::DateTime
-
- register_class_with_limit m, %r(Int8), Type::Integer
- register_class_with_limit m, %r(Int16), Type::Integer
- register_class_with_limit m, %r(Int32), Type::Integer
- register_class_with_limit m, %r(Int64), Type::Integer
- register_class_with_limit m, %r(Int128), Type::Integer
- register_class_with_limit m, %r(Int256), Type::Integer
-
- register_class_with_limit m, %r(UInt8), Type::UnsignedInteger
- register_class_with_limit m, %r(UInt16), Type::UnsignedInteger
- register_class_with_limit m, %r(UInt32), Type::UnsignedInteger
- register_class_with_limit m, %r(UInt64), Type::UnsignedInteger
- register_class_with_limit m, %r(UInt256), Type::UnsignedInteger
- # register_class_with_limit m, %r(Array), Clickhouse::OID::Array
- m.register_type(%r(Array)) do |sql_type|
- Clickhouse::OID::Array.new(sql_type)
- end
-
- m.register_type(%r(Map)) do |sql_type|
- Clickhouse::OID::Map.new(sql_type)
- end
- end
- end
-
TYPE_MAP = Type::TypeMap.new.tap { |m| initialize_type_map(m) }
# Initializes and connects a Clickhouse adapter.
- def initialize(logger, connection_parameters, config, full_config)
+ def initialize(logger, connection_parameters, config)
super(nil, logger)
@connection_parameters = connection_parameters
+ @connection_config = { user: config[:username], password: config[:password], database: config[:database] }.compact
+ @debug = config[:debug] || false
@config = config
- @debug = full_config[:debug] || false
- @full_config = full_config
@prepared_statements = false
@@ -208,7 +170,7 @@ def initialize(logger, connection_parameters, config, full_config)
end
def migrations_paths
- @full_config[:migrations_paths] || 'db/migrate_clickhouse'
+ @config[:migrations_paths] || 'db/migrate_clickhouse'
end
def arel_visitor # :nodoc:
@@ -227,6 +189,10 @@ def valid_type?(type)
!native_database_types[type].nil?
end
+ def supports_indexes_in_create?
+ true
+ end
+
class << self
def extract_limit(sql_type) # :nodoc:
case sql_type
@@ -285,6 +251,10 @@ def initialize_type_map(m) # :nodoc:
m.register_type(%r(Array)) do |sql_type|
Clickhouse::OID::Array.new(sql_type)
end
+
+ m.register_type(%r(Map)) do |sql_type|
+ Clickhouse::OID::Map.new(sql_type)
+ end
end
end
@@ -293,10 +263,6 @@ def type_map
@type_map ||= Type::TypeMap.new.tap { |m| ClickhouseAdapter.initialize_type_map(m) }
end
- def extract_precision(sql_type)
- $1.to_i if sql_type =~ /\((\d+)(,\s?\d+)?\)/
- end
-
def quote(value)
case value
when Array
@@ -349,8 +315,8 @@ def show_create_table(table)
def create_database(name)
sql = apply_cluster "CREATE DATABASE #{quote_table_name(name)}"
log_with_debug(sql, adapter_name) do
- res = @connection.post("/?#{@config.except(:database).to_param}", sql)
- process_response(res)
+ res = @connection.post("/?#{@connection_config.except(:database).to_param}", sql)
+ process_response(res, DEFAULT_RESPONSE_FORMAT)
end
end
@@ -371,7 +337,7 @@ def create_table(table_name, **options, &block)
options = apply_replica(table_name, options)
td = create_table_definition(apply_cluster(table_name), **options)
block.call td if block_given?
- td.column(:id, options[:id], null: false) if options[:id].present? && td[:id].blank?
+ td.column(:id, options[:id], null: false) if options[:id].present? && td[:id].blank? && options[:as].blank?
if options[:force]
drop_table(table_name, options.merge(if_exists: true))
@@ -385,17 +351,28 @@ def create_table(table_name, **options, &block)
raise 'Set a cluster' unless cluster
distributed_options =
- "Distributed(#{cluster}, #{@config[:database]}, #{table_name}, #{sharding_key})"
+ "Distributed(#{cluster}, #{@connection_config[:database]}, #{table_name}, #{sharding_key})"
create_table(distributed_table_name, **options.merge(options: distributed_options), &block)
end
end
+ def create_function(name, body)
+ fd = "CREATE FUNCTION #{apply_cluster(quote_table_name(name))} AS #{body}"
+ do_execute(fd, format: nil)
+ end
+
# Drops a ClickHouse database.
def drop_database(name) #:nodoc:
sql = apply_cluster "DROP DATABASE IF EXISTS #{quote_table_name(name)}"
log_with_debug(sql, adapter_name) do
- res = @connection.post("/?#{@config.except(:database).to_param}", sql)
- process_response(res)
+ res = @connection.post("/?#{@connection_config.except(:database).to_param}", sql)
+ process_response(res, DEFAULT_RESPONSE_FORMAT)
+ end
+ end
+
+ def drop_functions
+ functions.each do |function|
+ drop_function(function)
end
end
@@ -418,6 +395,16 @@ def drop_table(table_name, options = {}) # :nodoc:
end
end
+ def drop_function(name, options = {})
+ query = "DROP FUNCTION"
+ query = "#{query} IF EXISTS" if options[:if_exists]
+ query = "#{query} #{quote_table_name(name)}"
+ query = apply_cluster(query)
+ query = "#{query} SYNC" if options[:sync]
+
+ do_execute(query, format: nil)
+ end
+
def add_column(table_name, column_name, type, **options)
return if options[:if_not_exists] == true && column_exists?(table_name, column_name, type)
@@ -432,8 +419,8 @@ def remove_column(table_name, column_name, type = nil, **options)
execute("ALTER TABLE #{quote_table_name(table_name)} #{remove_column_for_alter(table_name, column_name, type, **options)}", nil, settings: {wait_end_of_query: 1, send_progress_in_http_headers: 1})
end
- def change_column(table_name, column_name, type, options = {})
- result = do_execute("ALTER TABLE #{quote_table_name(table_name)} #{change_column_for_alter(table_name, column_name, type, options)}", nil, settings: {wait_end_of_query: 1, send_progress_in_http_headers: 1})
+ def change_column(table_name, column_name, type, **options)
+ result = do_execute("ALTER TABLE #{quote_table_name(table_name)} #{change_column_for_alter(table_name, column_name, type, **options)}", nil, settings: {wait_end_of_query: 1, send_progress_in_http_headers: 1})
raise "Error parse json response: #{result}" if result.presence && !result.is_a?(Hash)
end
@@ -447,16 +434,53 @@ def change_column_default(table_name, column_name, default)
change_column table_name, column_name, nil, {default: default}.compact
end
+ # Adds index description to tables metadata
+ # @link https://clickhouse.com/docs/en/sql-reference/statements/alter/skipping-index
+ def add_index(table_name, expression, **options)
+ index = add_index_options(apply_cluster(table_name), expression, **options)
+ execute schema_creation.accept(CreateIndexDefinition.new(index))
+ end
+
+ # Removes index description from tables metadata and deletes index files from disk
+ def remove_index(table_name, name)
+ query = apply_cluster("ALTER TABLE #{quote_table_name(table_name)}")
+ execute "#{query} DROP INDEX #{quote_column_name(name)}"
+ end
+
+ # Rebuilds the secondary index name for the specified partition_name
+ def rebuild_index(table_name, name, if_exists: false, partition: nil)
+ query = [apply_cluster("ALTER TABLE #{quote_table_name(table_name)}")]
+ query << 'MATERIALIZE INDEX'
+ query << 'IF EXISTS' if if_exists
+ query << quote_column_name(name)
+ query << "IN PARTITION #{quote_column_name(partition)}" if partition
+ execute query.join(' ')
+ end
+
+ # Deletes the secondary index files from disk without removing description
+ def clear_index(table_name, name, if_exists: false, partition: nil)
+ query = [apply_cluster("ALTER TABLE #{quote_table_name(table_name)}")]
+ query << 'CLEAR INDEX'
+ query << 'IF EXISTS' if if_exists
+ query << quote_column_name(name)
+ query << "IN PARTITION #{quote_column_name(partition)}" if partition
+ execute query.join(' ')
+ end
+
def cluster
- @full_config[:cluster_name]
+ @config[:cluster_name]
end
def replica
- @full_config[:replica_name]
+ @config[:replica_name]
+ end
+
+ def database
+ @config[:database]
end
def use_default_replicated_merge_tree_params?
- database_engine_atomic? && @full_config[:use_default_replicated_merge_tree_params]
+ database_engine_atomic? && @config[:use_default_replicated_merge_tree_params]
end
def use_replica?
@@ -464,11 +488,11 @@ def use_replica?
end
def replica_path(table)
- "/clickhouse/tables/#{cluster}/#{@config[:database]}.#{table}"
+ "/clickhouse/tables/#{cluster}/#{@connection_config[:database]}.#{table}"
end
def database_engine_atomic?
- current_database_engine = "select engine from system.databases where name = '#{@config[:database]}'"
+ current_database_engine = "select engine from system.databases where name = '#{@connection_config[:database]}'"
res = select_one(current_database_engine)
res['engine'] == 'Atomic' if res
end
@@ -502,9 +526,9 @@ def last_inserted_id(result)
result
end
- def change_column_for_alter(table_name, column_name, type, options = {})
+ def change_column_for_alter(table_name, column_name, type, **options)
td = create_table_definition(table_name)
- cd = td.new_column_definition(column_name, type, options)
+ cd = td.new_column_definition(column_name, type, **options)
schema_creation.accept(ChangeColumnDefinition.new(cd, column_name))
end
diff --git a/lib/arel/visitors/clickhouse.rb b/lib/arel/visitors/clickhouse.rb
index e9a180aa..a8fd5993 100644
--- a/lib/arel/visitors/clickhouse.rb
+++ b/lib/arel/visitors/clickhouse.rb
@@ -13,6 +13,13 @@ def aggregate(name, o, collector)
end
end
+ # https://clickhouse.com/docs/en/sql-reference/statements/delete
+ # DELETE and UPDATE in ClickHouse working only without table name
+ def visit_Arel_Attributes_Attribute(o, collector)
+ collector << quote_table_name(o.relation.table_alias || o.relation.name) << '.' unless collector.value.start_with?('DELETE FROM ') || collector.value.include?(' UPDATE ')
+ collector << quote_column_name(o.name)
+ end
+
def visit_Arel_Nodes_SelectOptions(o, collector)
maybe_visit o.settings, super
end
@@ -53,6 +60,16 @@ def visit_Arel_Nodes_Using o, collector
collector
end
+ def visit_Arel_Nodes_Matches(o, collector)
+ op = o.case_sensitive ? " LIKE " : " ILIKE "
+ infix_value o, collector, op
+ end
+
+ def visit_Arel_Nodes_DoesNotMatch(o, collector)
+ op = o.case_sensitive ? " NOT LIKE " : " NOT ILIKE "
+ infix_value o, collector, op
+ end
+
def sanitize_as_setting_value(value)
if value == :default
'DEFAULT'
diff --git a/lib/clickhouse-activerecord.rb b/lib/clickhouse-activerecord.rb
index 67f95bda..40ce7ef7 100644
--- a/lib/clickhouse-activerecord.rb
+++ b/lib/clickhouse-activerecord.rb
@@ -5,15 +5,12 @@
require 'core_extensions/active_record/internal_metadata'
require 'core_extensions/active_record/relation'
require 'core_extensions/active_record/schema_migration'
-
+require 'core_extensions/active_record/migration/command_recorder'
require 'core_extensions/arel/nodes/select_core'
require 'core_extensions/arel/nodes/select_statement'
require 'core_extensions/arel/select_manager'
require 'core_extensions/arel/table'
-require_relative '../core_extensions/active_record/migration/command_recorder'
-ActiveRecord::Migration::CommandRecorder.include CoreExtensions::ActiveRecord::Migration::CommandRecorder
-
if defined?(Rails::Railtie)
require 'clickhouse-activerecord/railtie'
require 'clickhouse-activerecord/schema'
@@ -24,9 +21,10 @@
module ClickhouseActiverecord
def self.load
- ActiveRecord::InternalMetadata.singleton_class.prepend(CoreExtensions::ActiveRecord::InternalMetadata::ClassMethods)
+ ActiveRecord::InternalMetadata.prepend(CoreExtensions::ActiveRecord::InternalMetadata)
+ ActiveRecord::Migration::CommandRecorder.include(CoreExtensions::ActiveRecord::Migration::CommandRecorder)
ActiveRecord::Relation.prepend(CoreExtensions::ActiveRecord::Relation)
- ActiveRecord::SchemaMigration.singleton_class.prepend(CoreExtensions::ActiveRecord::SchemaMigration::ClassMethods)
+ ActiveRecord::SchemaMigration.prepend(CoreExtensions::ActiveRecord::SchemaMigration)
Arel::Nodes::SelectCore.prepend(CoreExtensions::Arel::Nodes::SelectCore)
Arel::Nodes::SelectStatement.prepend(CoreExtensions::Arel::Nodes::SelectStatement)
diff --git a/lib/clickhouse-activerecord/schema_dumper.rb b/lib/clickhouse-activerecord/schema_dumper.rb
index 6badba18..cbd5a2e6 100644
--- a/lib/clickhouse-activerecord/schema_dumper.rb
+++ b/lib/clickhouse-activerecord/schema_dumper.rb
@@ -34,8 +34,12 @@ def header(stream)
end
def tables(stream)
- sorted_tables = @connection.tables.sort {|a,b| @connection.show_create_table(a).match(/^CREATE\s+(MATERIALIZED\s+)?VIEW/) ? 1 : a <=> b }
+ functions = @connection.functions
+ functions.each do |function|
+ function(function, stream)
+ end
+ sorted_tables = @connection.tables.sort {|a,b| @connection.show_create_table(a).match(/^CREATE\s+(MATERIALIZED\s+)?VIEW/) ? 1 : a <=> b }
sorted_tables.each do |table_name|
table(table_name, stream) unless ignored?(table_name)
end
@@ -86,7 +90,9 @@ def table(table, stream)
unless simple
table_options = @connection.table_options(table)
if table_options.present?
- tbl.print ", #{format_options(table_options)}"
+ table_options = format_options(table_options)
+ table_options.gsub!(/Buffer\('[^']+'/, 'Buffer(\'#{connection.database}\'')
+ tbl.print ", #{table_options}"
end
end
@@ -104,7 +110,13 @@ def table(table, stream)
end
end
- indexes_in_create(table, tbl)
+ indexes = sql.scan(/INDEX \S+ \S+ TYPE .*? GRANULARITY \d+/)
+ if indexes.any?
+ tbl.puts ''
+ indexes.flatten.map!(&:strip).each do |index|
+ tbl.puts " t.index #{index_parts(index).join(', ')}"
+ end
+ end
tbl.puts " end"
tbl.puts
@@ -119,9 +131,16 @@ def table(table, stream)
end
end
+ def function(function, stream)
+ stream.puts " # FUNCTION: #{function}"
+ sql = @connection.show_create_function(function)
+ stream.puts " # SQL: #{sql}" if sql
+ stream.puts " create_function \"#{function}\", \"#{sql.gsub(/^CREATE FUNCTION (.*?) AS/, '').strip}\"" if sql
+ end
+
def format_options(options)
if options && options[:options]
- options[:options] = options[:options].gsub(/^Replicated(.*?)\('[^']+',\s*'[^']+',?\s?([^\)]*)?\)/, "\\1(\\2)")
+ options[:options].gsub!(/^Replicated(.*?)\('[^']+',\s*'[^']+',?\s?([^\)]*)?\)/, "\\1(\\2)")
end
super
end
@@ -160,5 +179,16 @@ def prepare_column_options(column)
spec.merge(super).compact
end
+
+ def index_parts(index)
+ idx = index.match(/^INDEX (?<name>\S+) (?<expr>.*?) TYPE (?<type>.*?) GRANULARITY (?<granularity>\d+)$/)
+ index_parts = [
+ format_index_parts(idx['expr']),
+ "name: #{format_index_parts(idx['name'])}",
+ "type: #{format_index_parts(idx['type'])}",
+ ]
+ index_parts << "granularity: #{idx['granularity']}" if idx['granularity']
+ index_parts
+ end
end
end
diff --git a/lib/clickhouse-activerecord/tasks.rb b/lib/clickhouse-activerecord/tasks.rb
index e3635992..130116a5 100644
--- a/lib/clickhouse-activerecord/tasks.rb
+++ b/lib/clickhouse-activerecord/tasks.rb
@@ -4,13 +4,17 @@ module ClickhouseActiverecord
class Tasks
delegate :connection, :establish_connection, to: ActiveRecord::Base
+ def self.using_database_configurations?
+ true
+ end
+
def initialize(configuration)
- @configuration = configuration.with_indifferent_access
+ @configuration = configuration
end
def create
establish_master_connection
- connection.create_database @configuration['database']
+ connection.create_database @configuration.database
rescue ActiveRecord::StatementInvalid => e
if e.cause.to_s.include?('already exists')
raise ActiveRecord::DatabaseAlreadyExists
@@ -21,7 +25,7 @@ def create
def drop
establish_master_connection
- connection.drop_database @configuration['database']
+ connection.drop_database @configuration.database
end
def purge
@@ -31,12 +35,28 @@ def purge
end
def structure_dump(*args)
- tables = connection.execute("SHOW TABLES FROM #{@configuration['database']}")['data'].flatten
+ establish_master_connection
+
+ # get all tables
+ tables = connection.execute("SHOW TABLES FROM #{@configuration.database} WHERE name NOT LIKE '.inner_id.%'")['data'].flatten.map do |table|
+ next if %w[schema_migrations ar_internal_metadata].include?(table)
+ connection.show_create_table(table).gsub("#{@configuration.database}.", '')
+ end.compact
+ # sort view to last
+ tables.sort_by! {|table| table.match(/^CREATE\s+(MATERIALIZED\s+)?VIEW/) ? 1 : 0}
+
+ # get all functions
+ functions = connection.execute("SELECT create_query FROM system.functions WHERE origin = 'SQLUserDefined'")['data'].flatten
+
+ # put to file
File.open(args.first, 'w:utf-8') do |file|
+ functions.each do |function|
+ file.puts function + ";\n\n"
+ end
+
tables.each do |table|
- next if table.match(/\.inner/)
- file.puts connection.execute("SHOW CREATE TABLE #{table}")['data'].try(:first).try(:first).gsub("#{@configuration['database']}.", '') + ";\n\n"
+ file.puts table + ";\n\n"
end
end
end
diff --git a/lib/clickhouse-activerecord/version.rb b/lib/clickhouse-activerecord/version.rb
index f7d40eb2..efa6f1df 100644
--- a/lib/clickhouse-activerecord/version.rb
+++ b/lib/clickhouse-activerecord/version.rb
@@ -1,3 +1,3 @@
module ClickhouseActiverecord
- VERSION = '1.0.3'
+ VERSION = '1.0.9'
end
diff --git a/lib/core_extensions/active_record/internal_metadata.rb b/lib/core_extensions/active_record/internal_metadata.rb
index fb7f4d4f..5f59dbfb 100644
--- a/lib/core_extensions/active_record/internal_metadata.rb
+++ b/lib/core_extensions/active_record/internal_metadata.rb
@@ -1,46 +1,54 @@
module CoreExtensions
module ActiveRecord
module InternalMetadata
- module ClassMethods
- def []=(key, value)
- row = final.find_by(key: key)
- if row.nil? || row.value != value
- create!(key: key, value: value)
- end
- end
+ def create_table
+ return super unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
+ return if table_exists? || !enabled?
+
+ key_options = connection.internal_string_options_for_primary_key
+ table_options = {
+ id: false,
+ options: 'ReplacingMergeTree(created_at) PARTITION BY key ORDER BY key',
+ if_not_exists: true
+ }
+ full_config = connection.instance_variable_get(:@config) || {}
+
+ if full_config[:distributed_service_tables]
+ table_options.merge!(with_distributed: table_name, sharding_key: 'cityHash64(created_at)')
- def [](key)
- final.where(key: key).pluck(:value).first
+ distributed_suffix = "_#{full_config[:distributed_service_tables_suffix] || 'distributed'}"
+ else
+ distributed_suffix = ''
end
- def create_table
- return super unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
- return if table_exists? || !enabled?
-
- key_options = connection.internal_string_options_for_primary_key
- table_options = {
- id: false,
- options: connection.adapter_name.downcase == 'clickhouse' ? 'ReplacingMergeTree(created_at) PARTITION BY key ORDER BY key' : '',
- if_not_exists: true
- }
- full_config = connection.instance_variable_get(:@full_config) || {}
-
- if full_config[:distributed_service_tables]
- table_options.merge!(with_distributed: table_name, sharding_key: 'cityHash64(created_at)')
-
- distributed_suffix = "_#{full_config[:distributed_service_tables_suffix] || 'distributed'}"
- else
- distributed_suffix = ''
- end
-
- connection.create_table(table_name + distributed_suffix.to_s, **table_options) do |t|
- t.string :key, **key_options
- t.string :value
- t.timestamps
- end
+ connection.create_table(table_name + distributed_suffix.to_s, **table_options) do |t|
+ t.string :key, **key_options
+ t.string :value
+ t.timestamps
end
end
+
+ private
+
+ def update_entry(key, new_value)
+ return super unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
+
+ create_entry(key, new_value)
+ end
+
+ def select_entry(key)
+ return super unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
+
+ sm = ::Arel::SelectManager.new(arel_table)
+ sm.final! if connection.table_options(table_name)[:options] =~ /^ReplacingMergeTree/
+ sm.project(::Arel.star)
+ sm.where(arel_table[primary_key].eq(::Arel::Nodes::BindParam.new(key)))
+ sm.order(arel_table[primary_key].asc)
+ sm.limit = 1
+
+ connection.select_one(sm, "#{self.class} Load")
+ end
end
end
end
diff --git a/core_extensions/active_record/migration/command_recorder.rb b/lib/core_extensions/active_record/migration/command_recorder.rb
similarity index 100%
rename from core_extensions/active_record/migration/command_recorder.rb
rename to lib/core_extensions/active_record/migration/command_recorder.rb
diff --git a/lib/core_extensions/active_record/schema_migration.rb b/lib/core_extensions/active_record/schema_migration.rb
index 3ff978e2..0da6a7ee 100644
--- a/lib/core_extensions/active_record/schema_migration.rb
+++ b/lib/core_extensions/active_record/schema_migration.rb
@@ -1,47 +1,45 @@
module CoreExtensions
module ActiveRecord
module SchemaMigration
- module ClassMethods
- def create_table
- return super unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
+ def create_table
+ return super unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
- return if table_exists?
+ return if table_exists?
- version_options = connection.internal_string_options_for_primary_key
- table_options = {
- id: false, options: 'ReplacingMergeTree(ver) ORDER BY (version)', if_not_exists: true
- }
- full_config = connection.instance_variable_get(:@full_config) || {}
+ version_options = connection.internal_string_options_for_primary_key
+ table_options = {
+ id: false, options: 'ReplacingMergeTree(ver) ORDER BY (version)', if_not_exists: true
+ }
+ full_config = connection.instance_variable_get(:@config) || {}
- if full_config[:distributed_service_tables]
- table_options.merge!(with_distributed: table_name, sharding_key: 'cityHash64(version)')
+ if full_config[:distributed_service_tables]
+ table_options.merge!(with_distributed: table_name, sharding_key: 'cityHash64(version)')
- distributed_suffix = "_#{full_config[:distributed_service_tables_suffix] || 'distributed'}"
- else
- distributed_suffix = ''
- end
+ distributed_suffix = "_#{full_config[:distributed_service_tables_suffix] || 'distributed'}"
+ else
+ distributed_suffix = ''
+ end
- connection.create_table(table_name + distributed_suffix.to_s, **table_options) do |t|
- t.string :version, **version_options
- t.column :active, 'Int8', null: false, default: '1'
- t.datetime :ver, null: false, default: -> { 'now()' }
- end
+ connection.create_table(table_name + distributed_suffix.to_s, **table_options) do |t|
+ t.string :version, **version_options
+ t.column :active, 'Int8', null: false, default: '1'
+ t.datetime :ver, null: false, default: -> { 'now()' }
end
+ end
- def delete_version(version)
- return super unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
+ def delete_version(version)
+ return super unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
- im = Arel::InsertManager.new(arel_table)
- im.insert(arel_table[primary_key] => version.to_s, arel_table['active'] => 0)
- connection.insert(im, "#{self.class} Create Rollback Version", primary_key, version)
- end
+ im = ::Arel::InsertManager.new(arel_table)
+ im.insert(arel_table[primary_key] => version.to_s, arel_table['active'] => 0)
+ connection.insert(im, "#{self.class} Create Rollback Version", primary_key, version)
+ end
- def all_versions
- return super unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
+ def all_versions
+ return super unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
- final.where(active: 1).order(:version).pluck(:version)
- end
+ final.where(active: 1).order(:version).pluck(:version)
end
end
end
diff --git a/lib/core_extensions/arel/nodes/select_statement.rb b/lib/core_extensions/arel/nodes/select_statement.rb
index 2784d027..b8f26ab5 100644
--- a/lib/core_extensions/arel/nodes/select_statement.rb
+++ b/lib/core_extensions/arel/nodes/select_statement.rb
@@ -5,7 +5,7 @@ module SelectStatement
attr_accessor :settings
def initialize(relation = nil)
- super(relation)
+ super
@settings = nil
end
diff --git a/lib/tasks/clickhouse.rake b/lib/tasks/clickhouse.rake
index eb37aea5..4854021d 100644
--- a/lib/tasks/clickhouse.rake
+++ b/lib/tasks/clickhouse.rake
@@ -15,15 +15,18 @@ namespace :clickhouse do
# TODO: deprecated
desc 'Load database schema'
task load: %i[prepare_internal_metadata_table] do
+ puts 'Warning: `rake clickhouse:schema:load` is deprecated! Use `rake db:schema:load:clickhouse` instead'
simple = ENV['simple'] || ARGV.any? { |a| a.include?('--simple') } ? '_simple' : nil
ActiveRecord::Base.establish_connection(:clickhouse)
- ActiveRecord::SchemaMigration.drop_table
+ connection = ActiveRecord::Tasks::DatabaseTasks.migration_connection
+ connection.schema_migration.drop_table
load(Rails.root.join("db/clickhouse_schema#{simple}.rb"))
end
# TODO: deprecated
desc 'Dump database schema'
task dump: :environment do |_, args|
+ puts 'Warning: `rake clickhouse:schema:dump` is deprecated! Use `rake db:schema:dump:clickhouse` instead'
simple = ENV['simple'] || args[:simple] || ARGV.any? { |a| a.include?('--simple') } ? '_simple' : nil
filename = Rails.root.join("db/clickhouse_schema#{simple}.rb")
File.open(filename, 'w:utf-8') do |file|
@@ -36,43 +39,38 @@ namespace :clickhouse do
namespace :structure do
desc 'Load database structure'
task load: ['db:check_protected_environments'] do
- config = ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, name: 'clickhouse')
ClickhouseActiverecord::Tasks.new(config).structure_load(Rails.root.join('db/clickhouse_structure.sql'))
end
desc 'Dump database structure'
task dump: ['db:check_protected_environments'] do
- config = ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, name: 'clickhouse')
ClickhouseActiverecord::Tasks.new(config).structure_dump(Rails.root.join('db/clickhouse_structure.sql'))
end
end
desc 'Creates the database from DATABASE_URL or config/database.yml'
task create: [] do
- config = ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, name: 'clickhouse')
- ActiveRecord::Tasks::DatabaseTasks.create(config)
+ puts 'Warning: `rake clickhouse:create` is deprecated! Use `rake db:create:clickhouse` instead'
end
desc 'Drops the database from DATABASE_URL or config/database.yml'
task drop: ['db:check_protected_environments'] do
- config = ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, name: 'clickhouse')
- ActiveRecord::Tasks::DatabaseTasks.drop(config)
+ puts 'Warning: `rake clickhouse:drop` is deprecated! Use `rake db:drop:clickhouse` instead'
end
desc 'Empty the database from DATABASE_URL or config/database.yml'
task purge: ['db:check_protected_environments'] do
- config = ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, name: 'clickhouse')
- ActiveRecord::Tasks::DatabaseTasks.purge(config)
+ puts 'Warning: `rake clickhouse:purge` is deprecated! Use `rake db:reset:clickhouse` instead'
end
# desc 'Resets your database using your migrations for the current environment'
task :reset do
- Rake::Task['clickhouse:purge'].execute
- Rake::Task['clickhouse:migrate'].execute
+ puts 'Warning: `rake clickhouse:reset` is deprecated! Use `rake db:reset:clickhouse` instead'
end
desc 'Migrate the clickhouse database'
task migrate: %i[prepare_schema_migration_table prepare_internal_metadata_table] do
+ puts 'Warning: `rake clickhouse:migrate` is deprecated! Use `rake db:migrate:clickhouse` instead'
Rake::Task['db:migrate:clickhouse'].execute
if File.exist? "#{Rails.root}/db/clickhouse_schema_simple.rb"
Rake::Task['clickhouse:schema:dump'].execute(simple: true)
@@ -81,9 +79,14 @@ namespace :clickhouse do
desc 'Rollback the clickhouse database'
task rollback: %i[prepare_schema_migration_table prepare_internal_metadata_table] do
+ puts 'Warning: `rake clickhouse:rollback` is deprecated! Use `rake db:rollback:clickhouse` instead'
Rake::Task['db:rollback:clickhouse'].execute
if File.exist? "#{Rails.root}/db/clickhouse_schema_simple.rb"
Rake::Task['clickhouse:schema:dump'].execute(simple: true)
end
end
+
+ def config
+ ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, name: 'clickhouse')
+ end
end
diff --git a/spec/cases/model_spec.rb b/spec/cases/model_spec.rb
deleted file mode 100644
index fafd3993..00000000
--- a/spec/cases/model_spec.rb
+++ /dev/null
@@ -1,212 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.describe 'Model', :migrations do
-
- let(:date) { Date.today }
-
- context 'sample' do
- let!(:model) do
- class ModelJoin < ActiveRecord::Base
- self.table_name = 'joins'
- belongs_to :model, class_name: 'Model'
- end
- class Model < ActiveRecord::Base
- self.table_name = 'sample'
- has_many :joins, class_name: 'ModelJoin', primary_key: 'event_name'
- end
- Model
- end
-
- before do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'add_sample_data')
- quietly { ActiveRecord::MigrationContext.new(migrations_dir, model.connection.schema_migration).up }
- end
-
-
- describe '#create' do
- it 'creates a new record' do
- expect {
- model.create!(
- event_name: 'some event',
- date: date
- )
- }.to change { model.count }
- end
-
- it 'insert all' do
- if ActiveRecord::version >= Gem::Version.new('6')
- model.insert_all([
- {event_name: 'some event 1', date: date},
- {event_name: 'some event 2', date: date},
- ])
- expect(model.count).to eq(2)
- end
- end
- end
-
- describe '#update' do
- let(:record) { model.create!(event_name: 'some event', date: date) }
-
- it 'raises an error' do
- record.update!(event_name: 'new event name')
- expect(model.where(event_name: 'new event name').count).to eq(1)
- end
- end
-
- describe '#destroy' do
- let(:record) { model.create!(event_name: 'some event', date: date) }
-
- it 'raises an error' do
- record.destroy!
- expect(model.count).to eq(0)
- end
- end
-
- describe '#reverse_order!' do
- it 'blank' do
- expect(model.all.reverse_order!.map(&:event_name)).to eq([])
- end
-
- it 'select' do
- model.create!(event_name: 'some event 1', date: 1.day.ago)
- model.create!(event_name: 'some event 2', date: 2.day.ago)
- expect(model.all.reverse_order!.map(&:event_name)).to eq(['some event 1', 'some event 2'])
- end
- end
-
- describe 'convert type with aggregations' do
- let!(:record1) { model.create!(event_name: 'some event', event_value: 1, date: date) }
- let!(:record2) { model.create!(event_name: 'some event', event_value: 3, date: date) }
-
- it 'integer' do
- expect(model.select(Arel.sql('sum(event_value) AS event_value')).first.event_value.class).to eq(Integer)
- expect(model.select(Arel.sql('sum(event_value) AS value')).first.attributes['value'].class).to eq(Integer)
- expect(model.pluck(Arel.sql('sum(event_value)')).first[0].class).to eq(Integer)
- end
- end
-
- describe 'boolean column type' do
- let!(:record1) { model.create!(event_name: 'some event', event_value: 1, date: date) }
-
- it 'bool result' do
- expect(model.first.enabled.class).to eq(FalseClass)
- end
-
- it 'is mapped to :boolean' do
- type = model.columns_hash['enabled'].type
- expect(type).to eq(:boolean)
- end
- end
-
- describe 'UUID column type' do
- let(:random_uuid) { SecureRandom.uuid }
- let!(:record1) do
- model.create!(event_name: 'some event', event_value: 1, date: date, relation_uuid: random_uuid)
- end
-
- it 'is mapped to :uuid' do
- type = model.columns_hash['relation_uuid'].type
- expect(type).to eq(:uuid)
- end
-
- it 'accepts proper value' do
- expect(record1.relation_uuid).to eq(random_uuid)
- end
-
- it 'does not accept invalid values' do
- record1.relation_uuid = 'invalid-uuid'
- expect(record1.relation_uuid).to be_nil
- end
- end
-
- describe '#settings' do
- it 'works' do
- sql = model.settings(optimize_read_in_order: 1, cast_keep_nullable: 1).to_sql
- expect(sql).to eq('SELECT sample.* FROM sample SETTINGS optimize_read_in_order = 1, cast_keep_nullable = 1')
- end
-
- it 'quotes' do
- sql = model.settings(foo: :bar).to_sql
- expect(sql).to eq('SELECT sample.* FROM sample SETTINGS foo = \'bar\'')
- end
-
- it 'allows passing the symbol :default to reset a setting' do
- sql = model.settings(max_insert_block_size: :default).to_sql
- expect(sql).to eq('SELECT sample.* FROM sample SETTINGS max_insert_block_size = DEFAULT')
- end
- end
-
- describe '#using' do
- it 'works' do
- sql = model.joins(:joins).using(:event_name, :date).to_sql
- expect(sql).to eq('SELECT sample.* FROM sample INNER JOIN joins USING event_name,date')
- end
- end
-
- describe 'DateTime64 create' do
- it 'create a new record' do
- time = DateTime.parse('2023-07-21 08:00:00.123')
- model.create!(datetime: time, datetime64: time)
- row = model.first
- expect(row.datetime).to_not eq(row.datetime64)
- expect(row.datetime.strftime('%Y-%m-%d %H:%M:%S')).to eq('2023-07-21 08:00:00')
- expect(row.datetime64.strftime('%Y-%m-%d %H:%M:%S.%3N')).to eq('2023-07-21 08:00:00.123')
- end
- end
-
- describe 'final request' do
- let!(:record1) { model.create!(date: date, event_name: '1') }
- let!(:record2) { model.create!(date: date, event_name: '1') }
-
- it 'select' do
- expect(model.count).to eq(2)
- expect(model.final.count).to eq(1)
- expect(model.final!.count).to eq(1)
- expect(model.final.where(date: '2023-07-21').to_sql).to eq('SELECT sample.* FROM sample FINAL WHERE sample.date = \'2023-07-21\'')
- end
- end
- end
-
- context 'array' do
-
- let!(:model) do
- Class.new(ActiveRecord::Base) do
- self.table_name = 'actions'
- end
- end
-
- before do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'add_array_datetime')
- quietly { ActiveRecord::MigrationContext.new(migrations_dir, model.connection.schema_migration).up }
- end
-
- describe '#create' do
- it 'creates a new record' do
- expect {
- model.create!(
- array_datetime: [1.day.ago, Time.now, '2022-12-06 15:22:49'],
- array_string: %w[asdf jkl],
- date: date
- )
- }.to change { model.count }
- event = model.first
- expect(event.array_datetime.is_a?(Array)).to be_truthy
- expect(event.array_datetime[0].is_a?(DateTime)).to be_truthy
- expect(event.array_string[0].is_a?(String)).to be_truthy
- expect(event.array_string).to eq(%w[asdf jkl])
- end
-
- it 'get record' do
- model.connection.insert("INSERT INTO #{model.table_name} (id, array_datetime, date) VALUES (1, '[''2022-12-06 15:22:49'',''2022-12-05 15:22:49'']', '2022-12-06')")
- expect(model.count).to eq(1)
- event = model.first
- expect(event.date.is_a?(Date)).to be_truthy
- expect(event.date).to eq(Date.parse('2022-12-06'))
- expect(event.array_datetime.is_a?(Array)).to be_truthy
- expect(event.array_datetime[0].is_a?(DateTime)).to be_truthy
- expect(event.array_datetime[0]).to eq('2022-12-06 15:22:49')
- expect(event.array_datetime[1]).to eq('2022-12-05 15:22:49')
- end
- end
- end
-end
diff --git a/spec/cluster/migration_spec.rb b/spec/cluster/migration_spec.rb
new file mode 100644
index 00000000..e8f9455b
--- /dev/null
+++ b/spec/cluster/migration_spec.rb
@@ -0,0 +1,142 @@
+# frozen_string_literal: true
+
+RSpec.describe 'Cluster Migration', :migrations do
+ describe 'performs migrations' do
+ let(:model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'some'
+ end
+ end
+ let(:directory) { raise 'NotImplemented' }
+ let(:migrations_dir) { File.join(FIXTURES_PATH, 'migrations', directory) }
+ let(:migration_context) { ActiveRecord::MigrationContext.new(migrations_dir, model.connection.schema_migration, model.connection.internal_metadata) }
+
+ connection_config = ActiveRecord::Base.connection_db_config.configuration_hash
+
+ before(:all) do
+ raise 'Unknown cluster name in config' if connection_config[:cluster_name].blank?
+ end
+
+ subject do
+ quietly { migration_context.up }
+ end
+
+ context 'dsl' do
+ context 'with distributed' do
+ let(:model_distributed) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'some_distributed'
+ end
+ end
+
+ let(:directory) { 'dsl_create_table_with_distributed' }
+ it 'creates a table with distributed table' do
+ subject
+
+ current_schema = schema(model)
+ current_schema_distributed = schema(model_distributed)
+
+ expect(current_schema.keys.count).to eq(1)
+ expect(current_schema_distributed.keys.count).to eq(1)
+
+ expect(current_schema).to have_key('date')
+ expect(current_schema_distributed).to have_key('date')
+
+ expect(current_schema['date'].sql_type).to eq('Date')
+ expect(current_schema_distributed['date'].sql_type).to eq('Date')
+ end
+
+ it 'drops a table with distributed table' do
+ subject
+
+ expect(ActiveRecord::Base.connection.tables).to include('some')
+ expect(ActiveRecord::Base.connection.tables).to include('some_distributed')
+
+ quietly do
+ migration_context.down
+ end
+
+ expect(ActiveRecord::Base.connection.tables).not_to include('some')
+ expect(ActiveRecord::Base.connection.tables).not_to include('some_distributed')
+ end
+ end
+
+ context "function" do
+ after do
+ ActiveRecord::Base.connection.drop_functions
+ end
+
+ context 'dsl' do
+ let(:directory) { 'dsl_create_function' }
+
+ it 'creates a function' do
+ subject
+
+ expect(ActiveRecord::Base.connection.functions).to match_array(['some_fun'])
+ end
+ end
+ end
+ end
+
+ context 'with alias in cluster_name' do
+ let(:model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'some'
+ end
+ end
+ connection_config = ActiveRecord::Base.connection_db_config.configuration_hash
+
+ before(:all) do
+ ActiveRecord::Base.establish_connection(connection_config.merge(cluster_name: '{cluster}'))
+ end
+
+ after(:all) do
+ ActiveRecord::Base.establish_connection(connection_config)
+ end
+
+ let(:directory) { 'dsl_create_table_with_cluster_name_alias' }
+ it 'creates a table' do
+ subject
+
+ current_schema = schema(model)
+
+ expect(current_schema.keys.count).to eq(1)
+ expect(current_schema).to have_key('date')
+ expect(current_schema['date'].sql_type).to eq('Date')
+ end
+
+ it 'drops a table' do
+ subject
+
+ expect(ActiveRecord::Base.connection.tables).to include('some')
+
+ # Need for sync between clickhouse servers
+ ActiveRecord::Base.connection.execute('SELECT * FROM schema_migrations')
+
+ quietly do
+ migration_context.down
+ end
+
+ expect(ActiveRecord::Base.connection.tables).not_to include('some')
+ end
+ end
+
+ context 'create table with index' do
+ let(:directory) { 'dsl_create_table_with_index' }
+
+ it 'creates a table' do
+
+ expect_any_instance_of(ActiveRecord::ConnectionAdapters::ClickhouseAdapter).to receive(:execute)
+ .with('ALTER TABLE some ON CLUSTER ' + connection_config[:cluster_name] + ' DROP INDEX idx')
+ .and_call_original
+ expect_any_instance_of(ActiveRecord::ConnectionAdapters::ClickhouseAdapter).to receive(:execute)
+ .with('ALTER TABLE some ON CLUSTER ' + connection_config[:cluster_name] + ' ADD INDEX idx2 (int1 * int2) TYPE set(10) GRANULARITY 4')
+ .and_call_original
+
+ subject
+
+ expect(ActiveRecord::Base.connection.show_create_table('some')).to include('INDEX idx2 int1 * int2 TYPE set(10) GRANULARITY 4')
+ end
+ end
+ end
+end
diff --git a/spec/fixtures/migrations/add_array_datetime/1_create_actions_table.rb b/spec/fixtures/migrations/add_array_datetime/1_create_actions_table.rb
index fa1d985b..7a1cd7f2 100644
--- a/spec/fixtures/migrations/add_array_datetime/1_create_actions_table.rb
+++ b/spec/fixtures/migrations/add_array_datetime/1_create_actions_table.rb
@@ -5,6 +5,7 @@ def up
create_table :actions, options: 'MergeTree ORDER BY date', force: true do |t|
t.datetime :array_datetime, null: false, array: true
t.string :array_string, null: false, array: true
+ t.integer :array_int, null: false, array: true
t.date :date, null: false
end
end
diff --git a/spec/fixtures/migrations/add_sample_data/1_create_sample_table.rb b/spec/fixtures/migrations/add_sample_data/1_create_sample_table.rb
index 8a2570ac..9619bfe5 100644
--- a/spec/fixtures/migrations/add_sample_data/1_create_sample_table.rb
+++ b/spec/fixtures/migrations/add_sample_data/1_create_sample_table.rb
@@ -9,7 +9,9 @@ def up
t.date :date, null: false
t.datetime :datetime, null: false
t.datetime :datetime64, precision: 3, null: true
+ t.string :byte_array, null: true
t.uuid :relation_uuid
+ t.decimal :decimal_value, precision: 38, scale: 16, null: true
end
end
end
diff --git a/spec/fixtures/migrations/add_sample_data/2_create_join_table.rb b/spec/fixtures/migrations/add_sample_data/2_create_join_table.rb
index bfbacbcd..7f4fb543 100644
--- a/spec/fixtures/migrations/add_sample_data/2_create_join_table.rb
+++ b/spec/fixtures/migrations/add_sample_data/2_create_join_table.rb
@@ -4,6 +4,7 @@ class CreateJoinTable < ActiveRecord::Migration[5.0]
def up
create_table :joins, options: 'MergeTree PARTITION BY toYYYYMM(date) ORDER BY (event_name)' do |t|
t.string :event_name, null: false
+ t.integer :event_value
t.integer :join_value
t.date :date, null: false
end
diff --git a/spec/fixtures/migrations/dsl_create_function/1_create_some_function.rb b/spec/fixtures/migrations/dsl_create_function/1_create_some_function.rb
new file mode 100644
index 00000000..8237036d
--- /dev/null
+++ b/spec/fixtures/migrations/dsl_create_function/1_create_some_function.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+class CreateSomeFunction < ActiveRecord::Migration[5.0]
+ def up
+ create_function :some_fun, "(x,y) -> x + y"
+ end
+end
diff --git a/spec/fixtures/migrations/dsl_create_table_with_cluster_name_alias/1_create_some_table.rb b/spec/fixtures/migrations/dsl_create_table_with_cluster_name_alias/1_create_some_table.rb
index 45f38644..0fc454df 100644
--- a/spec/fixtures/migrations/dsl_create_table_with_cluster_name_alias/1_create_some_table.rb
+++ b/spec/fixtures/migrations/dsl_create_table_with_cluster_name_alias/1_create_some_table.rb
@@ -1,6 +1,6 @@
class CreateSomeTable < ActiveRecord::Migration[5.0]
def change
- create_table :some, options: 'MergeTree PARTITION BY toYYYYMM(date) ORDER BY (date)' do |t|
+ create_table :some, options: 'MergeTree PARTITION BY toYYYYMM(date) ORDER BY (date)', sync: true, id: false do |t|
t.date :date, null: false
end
end
diff --git a/spec/fixtures/migrations/dsl_create_table_with_index/1_create_some_table.rb b/spec/fixtures/migrations/dsl_create_table_with_index/1_create_some_table.rb
new file mode 100644
index 00000000..384f2de9
--- /dev/null
+++ b/spec/fixtures/migrations/dsl_create_table_with_index/1_create_some_table.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+class CreateSomeTable < ActiveRecord::Migration[7.1]
+ def up
+ create_table :some, options: 'MergeTree PARTITION BY toYYYYMM(date) ORDER BY (date)' do |t|
+ t.integer :int1, null: false
+ t.integer :int2, null: false
+ t.date :date, null: false
+
+ t.index '(int1 * int2, date)', name: 'idx', type: 'minmax', granularity: 3
+ end
+ end
+end
+
diff --git a/spec/fixtures/migrations/dsl_create_table_with_index/2_drop_index.rb b/spec/fixtures/migrations/dsl_create_table_with_index/2_drop_index.rb
new file mode 100644
index 00000000..066eae78
--- /dev/null
+++ b/spec/fixtures/migrations/dsl_create_table_with_index/2_drop_index.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+class DropIndex < ActiveRecord::Migration[7.1]
+ def up
+ remove_index :some, 'idx'
+ end
+end
+
diff --git a/spec/fixtures/migrations/dsl_create_table_with_index/3_create_index.rb b/spec/fixtures/migrations/dsl_create_table_with_index/3_create_index.rb
new file mode 100644
index 00000000..0b0c5898
--- /dev/null
+++ b/spec/fixtures/migrations/dsl_create_table_with_index/3_create_index.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+class CreateIndex < ActiveRecord::Migration[7.1]
+ def up
+ add_index :some, 'int1 * int2', name: 'idx2', type: 'set(10)', granularity: 4
+ end
+end
+
diff --git a/spec/fixtures/migrations/dsl_table_buffer_creation/1_create_some_table.rb b/spec/fixtures/migrations/dsl_table_buffer_creation/1_create_some_table.rb
new file mode 100644
index 00000000..c50e7a12
--- /dev/null
+++ b/spec/fixtures/migrations/dsl_table_buffer_creation/1_create_some_table.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+class CreateSomeTable < ActiveRecord::Migration[5.0]
+ def up
+ create_table :some do
+
+ end
+ create_table :some_buffers, as: :some, options: "Buffer(#{connection.database}, some, 1, 10, 60, 100, 10000, 10000000, 100000000)"
+ end
+end
+
diff --git a/spec/fixtures/migrations/plain_function_creation/1_create_some_function.rb b/spec/fixtures/migrations/plain_function_creation/1_create_some_function.rb
new file mode 100644
index 00000000..a9a558fd
--- /dev/null
+++ b/spec/fixtures/migrations/plain_function_creation/1_create_some_function.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+class CreateSomeFunction < ActiveRecord::Migration[5.0]
+ def up
+ sql = <<~SQL
+ CREATE FUNCTION some_fun AS (x,y) -> x + y
+ SQL
+ do_execute(sql, format: nil)
+ end
+end
diff --git a/spec/cases/migration_spec.rb b/spec/single/migration_spec.rb
similarity index 76%
rename from spec/cases/migration_spec.rb
rename to spec/single/migration_spec.rb
index 6fb590a6..ac64d7ff 100644
--- a/spec/cases/migration_spec.rb
+++ b/spec/single/migration_spec.rb
@@ -11,16 +11,22 @@
let(:migrations_dir) { File.join(FIXTURES_PATH, 'migrations', directory) }
let(:migration_context) { ActiveRecord::MigrationContext.new(migrations_dir, model.connection.schema_migration, model.connection.internal_metadata) }
- if ActiveRecord::version >= Gem::Version.new('6.1')
- connection_config = ActiveRecord::Base.connection_db_config.configuration_hash
- else
- connection_config = ActiveRecord::Base.connection_config
- end
+ connection_config = ActiveRecord::Base.connection_db_config.configuration_hash
subject do
quietly { migration_context.up }
end
+ context 'database creation' do
+ let(:db) { (0...8).map { (65 + rand(26)).chr }.join.downcase }
+
+ it 'create' do
+ model.connection.create_database(db)
+ end
+
+ after { model.connection.drop_database(db) }
+ end
+
context 'table creation' do
context 'plain' do
let(:directory) { 'plain_table_creation' }
@@ -52,6 +58,15 @@
end
end
+ context 'with buffer table' do
+ let(:directory) { 'dsl_table_buffer_creation' }
+ it 'creates a table' do
+ subject
+
+ expect(ActiveRecord::Base.connection.tables).to include('some_buffers')
+ end
+ end
+
context 'with engine' do
let(:directory) { 'dsl_table_with_engine_creation' }
it 'creates a table' do
@@ -180,53 +195,6 @@
end
end
- context 'with distributed' do
- let(:model_distributed) do
- Class.new(ActiveRecord::Base) do
- self.table_name = 'some_distributed'
- end
- end
-
- before(:all) do
- ActiveRecord::Base.establish_connection(connection_config.merge(cluster_name: CLUSTER_NAME))
- end
-
- after(:all) do
- ActiveRecord::Base.establish_connection(connection_config)
- end
-
- let(:directory) { 'dsl_create_table_with_distributed' }
- it 'creates a table with distributed table' do
- subject
-
- current_schema = schema(model)
- current_schema_distributed = schema(model_distributed)
-
- expect(current_schema.keys.count).to eq(1)
- expect(current_schema_distributed.keys.count).to eq(1)
-
- expect(current_schema).to have_key('date')
- expect(current_schema_distributed).to have_key('date')
-
- expect(current_schema['date'].sql_type).to eq('Date')
- expect(current_schema_distributed['date'].sql_type).to eq('Date')
- end
-
- it 'drops a table with distributed table' do
- subject
-
- expect(ActiveRecord::Base.connection.tables).to include('some')
- expect(ActiveRecord::Base.connection.tables).to include('some_distributed')
-
- quietly do
- migration_context.down
- end
-
- expect(ActiveRecord::Base.connection.tables).not_to include('some')
- expect(ActiveRecord::Base.connection.tables).not_to include('some_distributed')
- end
- end
-
context 'creates a view' do
let(:directory) { 'dsl_create_view_with_to_section' }
it 'creates a view' do
@@ -250,49 +218,55 @@
expect(ActiveRecord::Base.connection.tables).not_to include('some_view')
end
end
- end
- context 'with alias in cluster_name' do
- let(:model) do
- Class.new(ActiveRecord::Base) do
- self.table_name = 'some'
+ context 'with index' do
+ let(:directory) { 'dsl_create_table_with_index' }
+
+ it 'creates a table' do
+ quietly { migration_context.up(1) }
+
+ expect(ActiveRecord::Base.connection.show_create_table('some')).to include('INDEX idx (int1 * int2, date) TYPE minmax GRANULARITY 3')
+
+ quietly { migration_context.up(2) }
+
+ expect(ActiveRecord::Base.connection.show_create_table('some')).to_not include('INDEX idx')
+
+ quietly { migration_context.up(3) }
+
+ expect(ActiveRecord::Base.connection.show_create_table('some')).to include('INDEX idx2 int1 * int2 TYPE set(10) GRANULARITY 4')
end
- end
- if ActiveRecord::version >= Gem::Version.new('6.1')
- connection_config = ActiveRecord::Base.connection_db_config.configuration_hash
- else
- connection_config = ActiveRecord::Base.connection_config
- end
- before(:all) do
- ActiveRecord::Base.establish_connection(connection_config.merge(cluster_name: '{cluster}'))
- end
+ it 'add index if not exists' do
+ subject
- after(:all) do
- ActiveRecord::Base.establish_connection(connection_config)
- end
+ expect { ActiveRecord::Base.connection.add_index('some', 'int1 + int2', name: 'idx2', type: 'minmax', granularity: 1) }.to raise_error(ActiveRecord::ActiveRecordError, include('already exists'))
- let(:directory) { 'dsl_create_table_with_cluster_name_alias' }
- it 'creates a table' do
- subject
+ ActiveRecord::Base.connection.add_index('some', 'int1 + int2', name: 'idx2', type: 'minmax', granularity: 1, if_not_exists: true)
+ end
- current_schema = schema(model)
+ it 'drop index if exists' do
+ subject
- expect(current_schema.keys.count).to eq(1)
- expect(current_schema).to have_key('date')
- expect(current_schema['date'].sql_type).to eq('Date')
- end
+ expect { ActiveRecord::Base.connection.remove_index('some', 'idx3') }.to raise_error(ActiveRecord::ActiveRecordError, include('Cannot find index'))
- it 'drops a table' do
- subject
+ ActiveRecord::Base.connection.remove_index('some', 'idx2')
+ end
- expect(ActiveRecord::Base.connection.tables).to include('some')
+ it 'rebuild index' do
+ subject
+
+ expect { ActiveRecord::Base.connection.rebuild_index('some', 'idx3') }.to raise_error(ActiveRecord::ActiveRecordError, include('Unknown index'))
+
+ expect { ActiveRecord::Base.connection.rebuild_index('some', 'idx3', true) }.to_not raise_error(ActiveRecord::ActiveRecordError)
- quietly do
- migration_context.down
+ ActiveRecord::Base.connection.rebuild_index('some', 'idx2')
end
- expect(ActiveRecord::Base.connection.tables).not_to include('some')
+ it 'clear index' do
+ subject
+
+ ActiveRecord::Base.connection.clear_index('some', 'idx2')
+ end
end
end
end
@@ -352,5 +326,29 @@
expect(current_schema['date'].sql_type).to eq('Date')
end
end
+
+ context 'function creation' do
+ after do
+ ActiveRecord::Base.connection.drop_functions
+ end
+
+ context 'plain' do
+ let(:directory) { 'plain_function_creation' }
+ it 'creates a function' do
+ subject
+
+ expect(ActiveRecord::Base.connection.functions).to match_array(['some_fun'])
+ end
+ end
+
+ context 'dsl' do
+ let(:directory) { 'dsl_create_function' }
+ it 'creates a function' do
+ subject
+
+ expect(ActiveRecord::Base.connection.functions).to match_array(['some_fun'])
+ end
+ end
+ end
end
end
diff --git a/spec/single/model_spec.rb b/spec/single/model_spec.rb
new file mode 100644
index 00000000..e9e46b54
--- /dev/null
+++ b/spec/single/model_spec.rb
@@ -0,0 +1,319 @@
+# frozen_string_literal: true
+
+RSpec.describe 'Model', :migrations do
+
+ class ModelJoin < ActiveRecord::Base
+ self.table_name = 'joins'
+ belongs_to :model, class_name: 'Model'
+ end
+ class Model < ActiveRecord::Base
+ self.table_name = 'sample'
+ has_many :joins, class_name: 'ModelJoin', primary_key: 'event_name'
+ end
+ class ModelPk < ActiveRecord::Base
+ self.table_name = 'sample'
+ self.primary_key = 'event_name'
+ end
+
+ let(:date) { Date.today }
+
+ context 'sample' do
+
+ before do
+ migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'add_sample_data')
+ quietly { ActiveRecord::MigrationContext.new(migrations_dir, Model.connection.schema_migration).up }
+ end
+
+ describe '#do_execute' do
+ it 'returns formatted result' do
+ result = Model.connection.do_execute('SELECT 1 AS t')
+ expect(result['data']).to eq([[1]])
+ expect(result['meta']).to eq([{ 'name' => 't', 'type' => 'UInt8' }])
+ end
+
+ context 'with JSONCompact format' do
+ it 'returns formatted result' do
+ result = Model.connection.do_execute('SELECT 1 AS t', format: 'JSONCompact')
+ expect(result['data']).to eq([[1]])
+ expect(result['meta']).to eq([{ 'name' => 't', 'type' => 'UInt8' }])
+ end
+ end
+
+ context 'with JSONCompactEachRowWithNamesAndTypes format' do
+ it 'returns formatted result' do
+ result = Model.connection.do_execute('SELECT 1 AS t', format: 'JSONCompactEachRowWithNamesAndTypes')
+ expect(result['data']).to eq([[1]])
+ expect(result['meta']).to eq([{ 'name' => 't', 'type' => 'UInt8' }])
+ end
+ end
+ end
+
+ describe '#create' do
+ it 'creates a new record' do
+ expect {
+ Model.create!(
+ event_name: 'some event',
+ date: date
+ )
+ }.to change { Model.count }
+ end
+
+ it 'insert all' do
+ if ActiveRecord::version >= Gem::Version.new('6')
+ Model.insert_all([
+ {event_name: 'some event 1', date: date},
+ {event_name: 'some event 2', date: date},
+ ])
+ expect(Model.count).to eq(2)
+ end
+ end
+ end
+
+ describe '#update' do
+ let!(:record) { Model.create!(event_name: 'some event', event_value: 1, date: date) }
+
+ it 'update' do
+ expect {
+ Model.where(event_name: 'some event').update_all(event_value: 2)
+ }.to_not raise_error
+ end
+
+ it 'update model with primary key' do
+ expect {
+ ModelPk.first.update!(event_value: 2)
+ }.to_not raise_error
+ end
+ end
+
+ describe '#delete' do
+ let!(:record) { Model.create!(event_name: 'some event', date: date) }
+
+ it 'model destroy' do
+ expect {
+ record.destroy!
+ }.to raise_error(ActiveRecord::ActiveRecordError, 'Deleting a row is not possible without a primary key')
+ end
+
+ it 'scope' do
+ expect {
+ Model.where(event_name: 'some event').delete_all
+ }.to_not raise_error
+ end
+
+ it 'destroy model with primary key' do
+ expect {
+ ModelPk.first.destroy!
+ }.to_not raise_error
+ end
+ end
+
+ describe '#reverse_order!' do
+ it 'blank' do
+ expect(Model.all.reverse_order!.map(&:event_name)).to eq([])
+ end
+
+ it 'select' do
+ Model.create!(event_name: 'some event 1', date: 1.day.ago)
+ Model.create!(event_name: 'some event 2', date: 2.day.ago)
+ expect(Model.all.reverse_order!.map(&:event_name)).to eq(['some event 1', 'some event 2'])
+ end
+ end
+
+ describe 'convert type with aggregations' do
+ let!(:record1) { Model.create!(event_name: 'some event', event_value: 1, date: date) }
+ let!(:record2) { Model.create!(event_name: 'some event', event_value: 3, date: date) }
+
+ it 'integer' do
+ expect(Model.select(Arel.sql('sum(event_value) AS event_value')).first.event_value.class).to eq(Integer)
+ expect(Model.select(Arel.sql('sum(event_value) AS value')).first.attributes['value'].class).to eq(Integer)
+ expect(Model.pluck(Arel.sql('sum(event_value)')).first[0].class).to eq(Integer)
+ end
+ end
+
+ describe 'boolean column type' do
+ let!(:record1) { Model.create!(event_name: 'some event', event_value: 1, date: date) }
+
+ it 'bool result' do
+ expect(Model.first.enabled.class).to eq(FalseClass)
+ end
+
+ it 'is mapped to :boolean' do
+ type = Model.columns_hash['enabled'].type
+ expect(type).to eq(:boolean)
+ end
+ end
+
+ describe 'string column type as byte array' do
+ let(:bytes) { (0..255).to_a }
+ let!(:record1) { Model.create!(event_name: 'some event', byte_array: bytes.pack('C*')) }
+
+ it 'keeps all bytes' do
+ returned_byte_array = Model.first.byte_array
+
+ expect(returned_byte_array.unpack('C*')).to eq(bytes)
+ end
+ end
+
+ describe 'UUID column type' do
+ let(:random_uuid) { SecureRandom.uuid }
+ let!(:record1) do
+ Model.create!(event_name: 'some event', event_value: 1, date: date, relation_uuid: random_uuid)
+ end
+
+ it 'is mapped to :uuid' do
+ type = Model.columns_hash['relation_uuid'].type
+ expect(type).to eq(:uuid)
+ end
+
+ it 'accepts proper value' do
+ expect(record1.relation_uuid).to eq(random_uuid)
+ end
+
+ it 'accepts non-canonical uuid' do
+ record1.relation_uuid = 'ABCD-0123-4567-89EF-dead-beef-0101-1010'
+ expect(record1.relation_uuid).to eq('abcd0123-4567-89ef-dead-beef01011010')
+ end
+
+ it 'does not accept invalid values' do
+ record1.relation_uuid = 'invalid-uuid'
+ expect(record1.relation_uuid).to be_nil
+ end
+ end
+
+ describe 'decimal column type' do
+ let!(:record1) do
+ Model.create!(event_name: 'some event', decimal_value: BigDecimal('95891.74'))
+ end
+
+ # If converted to float, the value would be something like 95891.74000000001. This happened
+ # due to JSON parsing of numeric values to floats.
+ it 'keeps precision' do
+ decimal_value = Model.first.decimal_value
+ expect(decimal_value).to eq(BigDecimal('95891.74'))
+ end
+ end
+
+ describe '#settings' do
+ it 'works' do
+ sql = Model.settings(optimize_read_in_order: 1, cast_keep_nullable: 1).to_sql
+ expect(sql).to eq('SELECT sample.* FROM sample SETTINGS optimize_read_in_order = 1, cast_keep_nullable = 1')
+ end
+
+ it 'quotes' do
+ sql = Model.settings(foo: :bar).to_sql
+ expect(sql).to eq('SELECT sample.* FROM sample SETTINGS foo = \'bar\'')
+ end
+
+ it 'allows passing the symbol :default to reset a setting' do
+ sql = Model.settings(max_insert_block_size: :default).to_sql
+ expect(sql).to eq('SELECT sample.* FROM sample SETTINGS max_insert_block_size = DEFAULT')
+ end
+ end
+
+ describe '#using' do
+ it 'works' do
+ sql = Model.joins(:joins).using(:event_name, :date).to_sql
+ expect(sql).to eq('SELECT sample.* FROM sample INNER JOIN joins USING event_name,date')
+ end
+
+ it 'works with filters' do
+ sql = Model.joins(:joins).using(:event_name, :date).where(joins: { event_value: 1 }).to_sql
+ expect(sql).to eq("SELECT sample.* FROM sample INNER JOIN joins USING event_name,date WHERE joins.event_value = 1")
+ end
+ end
+
+ describe 'arel predicates' do
+ describe '#matches' do
+ it 'uses ilike for case insensitive matches' do
+ sql = Model.where(Model.arel_table[:event_name].matches('some event')).to_sql
+ expect(sql).to eq("SELECT sample.* FROM sample WHERE sample.event_name ILIKE 'some event'")
+ end
+
+ it 'uses like for case sensitive matches' do
+ sql = Model.where(Model.arel_table[:event_name].matches('some event', nil, true)).to_sql
+ expect(sql).to eq("SELECT sample.* FROM sample WHERE sample.event_name LIKE 'some event'")
+ end
+ end
+ end
+
+ describe 'DateTime64 create' do
+ it 'create a new record' do
+ time = DateTime.parse('2023-07-21 08:00:00.123')
+ Model.create!(datetime: time, datetime64: time)
+ row = Model.first
+ expect(row.datetime).to_not eq(row.datetime64)
+ expect(row.datetime.strftime('%Y-%m-%d %H:%M:%S')).to eq('2023-07-21 08:00:00')
+ expect(row.datetime64.strftime('%Y-%m-%d %H:%M:%S.%3N')).to eq('2023-07-21 08:00:00.123')
+ end
+ end
+
+ describe 'final request' do
+ let!(:record1) { Model.create!(date: date, event_name: '1') }
+ let!(:record2) { Model.create!(date: date, event_name: '1') }
+
+ it 'select' do
+ expect(Model.count).to eq(2)
+ expect(Model.final.count).to eq(1)
+ expect(Model.final!.count).to eq(1)
+ expect(Model.final.where(date: '2023-07-21').to_sql).to eq('SELECT sample.* FROM sample FINAL WHERE sample.date = \'2023-07-21\'')
+ end
+ end
+ end
+
+ context 'array' do
+
+ let!(:model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'actions'
+ end
+ end
+
+ before do
+ migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'add_array_datetime')
+ quietly { ActiveRecord::MigrationContext.new(migrations_dir, model.connection.schema_migration).up }
+ end
+
+ describe '#create' do
+ it 'creates a new record' do
+ expect {
+ model.create!(
+ array_datetime: [1.day.ago, Time.now, '2022-12-06 15:22:49'],
+ array_string: %w[asdf jkl],
+ array_int: [1, 2],
+ date: date
+ )
+ }.to change { model.count }
+ event = model.first
+ expect(event.array_datetime.is_a?(Array)).to be_truthy
+ expect(event.array_datetime[0].is_a?(DateTime)).to be_truthy
+ expect(event.array_string[0].is_a?(String)).to be_truthy
+ expect(event.array_string).to eq(%w[asdf jkl])
+ expect(event.array_int.is_a?(Array)).to be_truthy
+ expect(event.array_int).to eq([1, 2])
+ end
+
+ it 'create with insert all' do
+ expect {
+ model.insert_all([{
+ array_datetime: [1.day.ago, Time.now, '2022-12-06 15:22:49'],
+ array_string: %w[asdf jkl],
+ array_int: [1, 2],
+ date: date
+ }])
+ }.to change { model.count }
+ end
+
+ it 'get record' do
+ model.connection.insert("INSERT INTO #{model.table_name} (id, array_datetime, date) VALUES (1, '[''2022-12-06 15:22:49'',''2022-12-05 15:22:49'']', '2022-12-06')")
+ expect(model.count).to eq(1)
+ event = model.first
+ expect(event.date.is_a?(Date)).to be_truthy
+ expect(event.date).to eq(Date.parse('2022-12-06'))
+ expect(event.array_datetime.is_a?(Array)).to be_truthy
+ expect(event.array_datetime[0].is_a?(DateTime)).to be_truthy
+ expect(event.array_datetime[0]).to eq('2022-12-06 15:22:49')
+ expect(event.array_datetime[1]).to eq('2022-12-05 15:22:49')
+ end
+ end
+ end
+end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 78e2fca4..ccf95d31 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -9,7 +9,6 @@
ClickhouseActiverecord.load
FIXTURES_PATH = File.join(File.dirname(__FILE__), 'fixtures')
-CLUSTER_NAME = 'test'
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
@@ -38,10 +37,12 @@
default: {
adapter: 'clickhouse',
host: 'localhost',
- port: 8123,
- database: 'test',
+ port: ENV['CLICKHOUSE_PORT'] || 8123,
+ database: ENV['CLICKHOUSE_DATABASE'] || 'test',
username: nil,
- password: nil
+ password: nil,
+ use_metadata_table: false,
+ cluster_name: ENV['CLICKHOUSE_CLUSTER'],
}
)
@@ -55,15 +56,11 @@ def schema(model)
end
def clear_db
- if ActiveRecord::version >= Gem::Version.new('6.1')
- cluster = ActiveRecord::Base.connection_db_config.configuration_hash[:cluster_name]
- else
- cluster = ActiveRecord::Base.connection_config[:cluster_name]
- end
+ cluster = ActiveRecord::Base.connection_db_config.configuration_hash[:cluster_name]
pattern = if cluster
normalized_cluster_name = cluster.start_with?('{') ? "'#{cluster}'" : cluster
- "DROP TABLE %s ON CLUSTER #{normalized_cluster_name}"
+ "DROP TABLE %s ON CLUSTER #{normalized_cluster_name} SYNC"
else
'DROP TABLE %s'
end