method
pk_and_sequence_for
v7.1.3.4
Class: SchemaStatements
pk_and_sequence_for(table)
public
Returns a table's primary key and the sequence that backs it, as a [pk, sequence] pair. The sequence is nil when the primary key is not sequence-backed (for example, a uuid default).
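
For orientation, a minimal, hypothetical usage sketch. The connection handle and the "comments" / "api_keys" table names are assumptions for illustration, not part of this page:

conn = ActiveRecord::Base.connection

# Serial/bigserial primary key backed by a sequence:
pk, seq = conn.pk_and_sequence_for("comments")
pk        # => "id"
seq       # => a PostgreSQL::Name wrapping the schema-qualified sequence name
seq.to_s  # => "public.comments_id_seq"

# Primary key with no backing sequence (e.g. a uuid default):
pk, seq = conn.pk_and_sequence_for("api_keys")
seq       # => nil
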
Source:
# File activerecord/lib/active_record/connection_adapters/postgresql/schema_statements.rb, line 305
def pk_and_sequence_for(table) # :nodoc:
  # First try looking for a sequence with a dependency on the
  # given table's primary key.
  result = query(<<~SQL, "SCHEMA")[0]
    SELECT attr.attname, nsp.nspname, seq.relname
    FROM pg_class seq,
         pg_attribute attr,
         pg_depend dep,
         pg_constraint cons,
         pg_namespace nsp
    WHERE seq.oid = dep.objid
      AND seq.relkind = 'S'
      AND attr.attrelid = dep.refobjid
      AND attr.attnum = dep.refobjsubid
      AND attr.attrelid = cons.conrelid
      AND attr.attnum = cons.conkey[1]
      AND seq.relnamespace = nsp.oid
      AND cons.contype = 'p'
      AND dep.classid = 'pg_class'::regclass
      AND dep.refobjid = #{quote(quote_table_name(table))}::regclass
  SQL

  if result.nil? || result.empty?
    result = query(<<~SQL, "SCHEMA")[0]
      SELECT attr.attname, nsp.nspname,
        CASE
          WHEN pg_get_expr(def.adbin, def.adrelid) !~* 'nextval' THEN NULL
          WHEN split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2) ~ '.' THEN
            substr(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2),
                   strpos(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2), '.')+1)
          ELSE split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2)
        END
      FROM pg_class t
      JOIN pg_attribute attr ON (t.oid = attrelid)
      JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)
      JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])
      JOIN pg_namespace nsp ON (t.relnamespace = nsp.oid)
      WHERE t.oid = #{quote(quote_table_name(table))}::regclass
        AND cons.contype = 'p'
        AND pg_get_expr(def.adbin, def.adrelid) ~* 'nextval|uuid_generate|gen_random_uuid'
    SQL
  end

  pk = result.shift
  if result.last
    [pk, PostgreSQL::Name.new(*result)]
  else
    [pk, nil]
  end
rescue
  nil
end
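
One consumer of this pair inside the PostgreSQL adapter is reset_pk_sequence!, which, when not given an explicit primary key and sequence, resolves them through pk_and_sequence_for and then realigns the sequence with the table's current maximum id. A hedged sketch (the table name is an assumption):

# After bulk-inserting rows with explicit ids, realign the backing sequence
# so the next INSERT does not reuse an existing id.
conn = ActiveRecord::Base.connection
conn.reset_pk_sequence!("comments")
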