Module: Gitlab::Database::MigrationHelpers

Includes:
AsyncConstraints::MigrationHelpers, AsyncIndexes::MigrationHelpers, DynamicModelHelpers, FeatureFlagMigratorHelpers, WraparoundVacuumHelpers, Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers, Gitlab::Database::Migrations::ConstraintsHelpers, Gitlab::Database::Migrations::ExtensionHelpers, Gitlab::Database::Migrations::ForeignKeyHelpers, Gitlab::Database::Migrations::LockRetriesHelpers, Gitlab::Database::Migrations::RedisHelpers, Gitlab::Database::Migrations::ReestablishedConnectionStack, Gitlab::Database::Migrations::SidekiqHelpers, Gitlab::Database::Migrations::TimeoutHelpers, PartitionHelpers, RenameTableHelpers
Included in:
CascadingNamespaceSettings, V2, WorkItems::Widgets, PartitioningMigrationHelpers::IndexHelpers, PartitioningMigrationHelpers::TableManagementHelpers, PartitioningMigrationHelpers::UniquenessHelpers
Defined in:
lib/gitlab/database/migration_helpers.rb,
lib/gitlab/database/migration_helpers/v2.rb,
lib/gitlab/database/migration_helpers/swapping.rb,
lib/gitlab/database/migration_helpers/announce_database.rb,
lib/gitlab/database/migration_helpers/convert_to_bigint.rb,
lib/gitlab/database/migration_helpers/work_items/widgets.rb,
lib/gitlab/database/migration_helpers/wraparound_autovacuum.rb,
lib/gitlab/database/migration_helpers/restrict_gitlab_schema.rb,
lib/gitlab/database/migration_helpers/loose_foreign_key_helpers.rb,
lib/gitlab/database/migration_helpers/wraparound_vacuum_helpers.rb,
lib/gitlab/database/migration_helpers/cascading_namespace_settings.rb,
lib/gitlab/database/migration_helpers/feature_flag_migrator_helpers.rb,
lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables.rb,
lib/gitlab/database/migration_helpers/require_disable_ddl_transaction_for_multiple_locks.rb

Defined Under Namespace

Modules: AnnounceDatabase, AutomaticLockWritesOnTables, CascadingNamespaceSettings, ConvertToBigint, FeatureFlagMigratorHelpers, LooseForeignKeyHelpers, RequireDisableDdlTransactionForMultipleLocks, RestrictGitlabSchema, Swapping, V2, WorkItems, WraparoundAutovacuum, WraparoundVacuumHelpers

Constant Summary collapse

INTEGER_IDS_YET_TO_INITIALIZED_TO_BIGINT_FILE_PATH =
'db/integer_ids_not_yet_initialized_to_bigint.yml'
TABLE_INT_IDS_YAML_FILE_COMMENT =
<<-MESSAGE.strip_heredoc
  # -- DON'T MANUALLY EDIT --
  # Contains the list of integer IDs which were converted to bigint for new installations in
  # https://gitlab.com/gitlab-org/gitlab/-/issues/438124, but they are still integers for existing instances.
  # On initialize_conversion_of_integer_to_bigint those integer IDs will be removed automatically from here.
MESSAGE
PENDING_INT_IDS_ERROR_MSG =
"'%{table}' table still has %{int_ids} integer IDs. "\
"Please include them in the 'columns' param and in your backfill migration. "\
"For more info: https://gitlab.com/gitlab-org/gitlab/-/issues/482470"
ENFORCE_INITIALIZE_ALL_INT_IDS_FROM_MILESTONE =
'17.4'
DEFAULT_TIMESTAMP_COLUMNS =
%i[created_at updated_at].freeze

Constants included from DynamicModelHelpers

DynamicModelHelpers::BATCH_SIZE

Constants included from Gitlab::Database::Migrations::RedisHelpers

Gitlab::Database::Migrations::RedisHelpers::SCAN_START_CURSOR

Constants included from Gitlab::Database::Migrations::SidekiqHelpers

Gitlab::Database::Migrations::SidekiqHelpers::DEFAULT_MAX_ATTEMPTS, Gitlab::Database::Migrations::SidekiqHelpers::DEFAULT_TIMES_IN_A_ROW

Constants included from Gitlab::Database::Migrations::ConstraintsHelpers

Gitlab::Database::Migrations::ConstraintsHelpers::MAX_IDENTIFIER_NAME_LENGTH

Constants included from Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers

Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers::BATCH_CLASS_NAME, Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers::BATCH_MIN_DELAY, Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers::BATCH_MIN_VALUE, Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers::BATCH_SIZE, Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers::EARLY_FINALIZATION_ERROR, Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers::ENFORCE_EARLY_FINALIZATION_FROM_VERSION, Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers::MIGRATION_NOT_FOUND_MESSAGE, Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers::MINIMUM_PAUSE_MS, Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers::NonExistentMigrationError, Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers::SUB_BATCH_SIZE

Instance Method Summary collapse

Methods included from PartitionHelpers

#partition?, #table_partitioned?

Methods included from WraparoundVacuumHelpers

#check_if_wraparound_in_progress

Methods included from AsyncConstraints::MigrationHelpers

#prepare_async_check_constraint_validation, #prepare_async_foreign_key_validation, #prepare_partitioned_async_check_constraint_validation, #prepare_partitioned_async_foreign_key_validation, #unprepare_async_check_constraint_validation, #unprepare_async_foreign_key_validation, #unprepare_partitioned_async_check_constraint_validation, #unprepare_partitioned_async_foreign_key_validation

Methods included from AsyncIndexes::MigrationHelpers

#async_index_creation_available?, #prepare_async_index, #prepare_async_index_from_sql, #prepare_async_index_removal, #unprepare_async_index, #unprepare_async_index_by_name

Methods included from RenameTableHelpers

#finalize_table_rename, #rename_table_safely, #undo_finalize_table_rename, #undo_rename_table_safely

Methods included from FeatureFlagMigratorHelpers

#down_migrate_to_jsonb_setting, #down_migrate_to_setting, #up_migrate_to_jsonb_setting, #up_migrate_to_setting

Methods included from DynamicModelHelpers

define_batchable_model

Methods included from Gitlab::Database::Migrations::ForeignKeyHelpers

#add_concurrent_foreign_key, #concurrent_foreign_key_name, #foreign_key_exists?, #remove_foreign_key_if_exists, #remove_foreign_key_without_error, #validate_foreign_key

Methods included from Gitlab::Database::Migrations::LockRetriesHelpers

#with_lock_retries

Methods included from Gitlab::Database::Migrations::TimeoutHelpers

#disable_statement_timeout

Methods included from Gitlab::Database::Migrations::RedisHelpers

#queue_redis_migration_job

Methods included from Gitlab::Database::Migrations::SidekiqHelpers

#migrate_across_instance, #migrate_within_instance, #sidekiq_queue_migrate, #sidekiq_remove_jobs

Methods included from Gitlab::Database::Migrations::ExtensionHelpers

#create_extension, #drop_extension

Methods included from Gitlab::Database::Migrations::ConstraintsHelpers

#add_check_constraint, #add_multi_column_not_null_constraint, #add_not_null_constraint, #add_text_limit, #check_constraint_exists?, check_constraint_exists?, #check_constraint_name, #check_not_null_constraint_exists?, #check_text_limit_exists?, #copy_check_constraints, #drop_constraint, #remove_check_constraint, #remove_multi_column_not_null_constraint, #remove_not_null_constraint, #remove_text_limit, #rename_constraint, #switch_constraint_names, #text_limit_name, #validate_check_constraint, #validate_multi_column_not_null_constraint, #validate_not_null_constraint, #validate_text_limit

Methods included from Gitlab::Database::Migrations::BatchedBackgroundMigrationHelpers

#delete_batched_background_migration, #ensure_batched_background_migration_is_finished, #finalize_batched_background_migration, #gitlab_schema_from_context, #queue_batched_background_migration

Methods included from Gitlab::Database::Migrations::ReestablishedConnectionStack

#with_restored_connection_stack

Instance Method Details

#add_concurrent_index(table_name, column_name, options = {}) ⇒ Object

Creates a new index, concurrently

Example:

add_concurrent_index :users, :some_column

See Rails' `add_index` for more info on the available arguments.



113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
# File 'lib/gitlab/database/migration_helpers.rb', line 113

# Creates a new index concurrently, guarding against the known failure
# modes of CREATE INDEX CONCURRENTLY: open transactions, partitioned
# tables, and leftover INVALID indexes from aborted migrations.
#
# table_name  - The table to index.
# column_name - The column (or array of columns) to index.
# options     - Options forwarded to Rails' `add_index`. The
#               :allow_partition key is consumed here and not forwarded.
def add_concurrent_index(table_name, column_name, options = {})
  # CREATE INDEX CONCURRENTLY cannot run inside a transaction block.
  if transaction_open?
    raise 'add_concurrent_index can not be run inside a transaction, ' \
      'you can disable transactions by calling disable_ddl_transaction! ' \
      'in the body of your migration class'
  end

  if !options.delete(:allow_partition) && partition?(table_name)
    raise ArgumentError, 'add_concurrent_index can not be used on a partitioned '  \
      'table. Please use add_concurrent_partitioned_index on the partitioned table ' \
      'as we need to create indexes on each partition and an index on the parent table'
  end

  options = options.merge({ algorithm: :concurrently })

  if index_exists?(table_name, column_name, **options)
    name = options[:name] || index_name(table_name, column_name)
    # For schema-qualified names ("schema.table"), extract the schema part.
    _, schema = table_name.to_s.split('.').reverse

    # An INVALID index (e.g. from an aborted concurrent build) is dropped
    # and recreated; a valid pre-existing index makes this a no-op.
    if index_invalid?(name, schema: schema)
      say "Index being recreated because the existing version was INVALID: table_name: #{table_name}, column_name: #{column_name}"

      remove_concurrent_index_by_name(table_name, name)
    else
      say "Index not created because it already exists (this may be due to an aborted migration or similar): table_name: #{table_name}, column_name: #{column_name}"

      return
    end
  end

  disable_statement_timeout do
    add_index(table_name, column_name, **options)
  end

  # We created this index. Now let's remove the queuing entry for async creation in case it's still there.
  unprepare_async_index(table_name, column_name, **options)
end

#add_primary_key_using_index(table_name, pk_name, index_to_use) ⇒ Object



923
924
925
926
927
# File 'lib/gitlab/database/migration_helpers.rb', line 923

# Promotes an existing unique index to be the table's primary key
# constraint, avoiding a fresh full-table verification.
#
# table_name   - The table to add the primary key to.
# pk_name      - The name the new primary key constraint should get.
# index_to_use - The existing index backing the new primary key.
def add_primary_key_using_index(table_name, pk_name, index_to_use)
  statement = <<~SQL
    ALTER TABLE #{quote_table_name(table_name)} ADD CONSTRAINT #{quote_table_name(pk_name)} PRIMARY KEY USING INDEX #{quote_table_name(index_to_use)}
  SQL

  execute(statement)
end

#add_sequence(table_name, column_name, sequence_name, start_value) ⇒ Object



944
945
946
947
948
949
# File 'lib/gitlab/database/migration_helpers.rb', line 944

# Creates a sequence and wires it up as the column's DEFAULT via nextval().
#
# table_name    - The table owning the column.
# column_name   - The column to receive the nextval() default.
# sequence_name - The name of the sequence to create.
# start_value   - The sequence's START value.
def add_sequence(table_name, column_name, sequence_name, start_value)
  sql = <<~SQL
    CREATE SEQUENCE #{quote_table_name(sequence_name)} START #{start_value};
    ALTER TABLE #{quote_table_name(table_name)} ALTER COLUMN #{quote_column_name(column_name)} SET DEFAULT nextval(#{quote(sequence_name)})
  SQL

  execute(sql)
end

#add_timestamps_with_timezone(table_name, options = {}) ⇒ Object

Adds `created_at` and `updated_at` columns with timezone information.

This method is an improved version of Rails' built-in method `add_timestamps`.

By default, adds `created_at` and `updated_at` columns, but these can be specified as:

add_timestamps_with_timezone(:my_table, columns: [:created_at, :deleted_at])

This allows you to create just the timestamps you need, saving space.

Available options are:

:default - The default value for the column.
:null - When set to `true` the column will allow NULL values.
      The default is to not allow NULL values.
:columns - the column names to create. Must end with `_at`.
           Default value: `DEFAULT_TIMESTAMP_COLUMNS`

All options are optional.



75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
# File 'lib/gitlab/database/migration_helpers.rb', line 75

# Adds timezone-aware timestamp columns (by default `created_at` and
# `updated_at`) to the given table.
#
# table_name - The table to add the columns to.
# options    - :columns (names, must end in `_at`), :default, and :null
#              (columns are NOT NULL unless :null is truthy).
def add_timestamps_with_timezone(table_name, options = {})
  requested_columns = options.fetch(:columns, DEFAULT_TIMESTAMP_COLUMNS)

  requested_columns.each do |timestamp_column|
    # Rejects column names that do not follow the `_at` convention.
    validate_timestamp_column_name!(timestamp_column)

    add_column(
      table_name,
      timestamp_column,
      :datetime_with_timezone,
      default: options[:default],
      null: options[:null] || false
    )
  end
end

#backfill_conversion_of_integer_to_bigint(table, columns, primary_key: :id, batch_size: 20_000, sub_batch_size: 1000, pause_ms: 100, interval: 2.minutes) ⇒ Object

Backfills the new columns used in an integer-to-bigint conversion using background migrations.

  • This helper should be called from a post-deployment migration.

  • In order for this helper to work properly, the new columns must be first initialized with the ‘initialize_conversion_of_integer_to_bigint` helper.

  • It tracks the scheduled background jobs through Gitlab::Database::BackgroundMigration::BatchedMigration, which allows a more thorough check that all jobs succeeded in the cleanup migration and is way faster for very large tables.

    Note: this helper is intended to be used in a post-deployment migration, to ensure any new code is deployed (including background job changes) before we begin processing the background migration.

    This helper is part 2 of a multi-step migration process:

    1. initialize_conversion_of_integer_to_bigint to create the new columns and database trigger

    2. backfill_conversion_of_integer_to_bigint to copy historic data using background migrations

    3. remaining steps TBD, see #288005

table - The name of the database table containing the column columns - The name, or an array of names, of the column(s) we want to convert to bigint. primary_key - The name of the primary key column (most often :id) batch_size - The number of rows to schedule in a single background migration sub_batch_size - The smaller batches that will be used by each scheduled job

to update the table. Useful to keep each update at ~100ms while executing
more updates per interval (2.minutes)
Note that each execution of a sub-batch adds a constant 100ms sleep
 time in between the updates, which must be taken into account
 while calculating the batch, sub_batch and interval values.

interval - The time interval between every background migration

example: Assume that we have figured out that updating 200 records of the events

table takes ~100ms on average.

We can set the sub_batch_size to 200, leave the interval to the default

and set the batch_size to 50_000 which will require
~50s = (50000 / 200) * (0.1 + 0.1) to complete and leaves breathing space
between the scheduled jobs


702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
# File 'lib/gitlab/database/migration_helpers.rb', line 702

# Backfills the bigint shadow columns created by
# initialize_conversion_of_integer_to_bigint using batched background
# migrations. Intended for post-deployment migrations.
#
# table          - The table being converted.
# columns        - The column name(s) being converted to bigint.
# primary_key    - The primary key column (most often :id).
# batch_size     - Rows scheduled per background migration.
# sub_batch_size - Rows updated per sub-batch inside each job.
# pause_ms       - Pause between sub-batches, in milliseconds.
# interval       - Time between scheduled background migration jobs.
def backfill_conversion_of_integer_to_bigint(
  table,
  columns,
  primary_key: :id,
  batch_size: 20_000,
  sub_batch_size: 1000,
  pause_ms: 100,
  interval: 2.minutes
)
  converter = Gitlab::Database::Migrations::Conversions::BigintConverter.new(self, table, columns)

  converter.backfill(
    primary_key: primary_key,
    batch_size: batch_size,
    sub_batch_size: sub_batch_size,
    pause_ms: pause_ms,
    job_interval: interval
  )
end

#backfill_iids(table) ⇒ Object

Note this should only be used with very small tables



910
911
912
913
914
915
916
917
918
919
920
921
# File 'lib/gitlab/database/migration_helpers.rb', line 910

# Backfills the `iid` column of the given table, numbering rows per
# project_id in ascending id order via ROW_NUMBER().
#
# Note this should only be used with very small tables, as the UPDATE
# rewrites every row in a single statement.
def backfill_iids(table)
  sql = <<-END
    UPDATE #{table}
    SET iid = #{table}_with_calculated_iid.iid_num
    FROM (
      SELECT id, ROW_NUMBER() OVER (PARTITION BY project_id ORDER BY id ASC) AS iid_num FROM #{table}
    ) AS #{table}_with_calculated_iid
    WHERE #{table}.id = #{table}_with_calculated_iid.id
  END

  execute(sql)
end

#change_column_type_concurrently(table, column, new_type, type_cast_function: nil, batch_column_name: :id) ⇒ Object

Changes the type of a column concurrently.

table - The table containing the column. column - The name of the column to change. new_type - The new column type.



456
457
458
459
460
# File 'lib/gitlab/database/migration_helpers.rb', line 456

# Changes the type of a column concurrently by renaming it into a
# "<column>_for_type_change" shadow column with the new type.
#
# table    - The table containing the column.
# column   - The name of the column to change.
# new_type - The new column type.
def change_column_type_concurrently(table, column, new_type, type_cast_function: nil, batch_column_name: :id)
  rename_column_concurrently(
    table,
    column,
    "#{column}_for_type_change",
    type: new_type,
    type_cast_function: type_cast_function,
    batch_column_name: batch_column_name
  )
end

#check_trigger_permissions!(table) ⇒ Object



843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
# File 'lib/gitlab/database/migration_helpers.rb', line 843

# Verifies the database user may create, drop and execute triggers on the
# given table, raising with remediation instructions when it may not.
#
# table - The table the triggers would be installed on.
#
# Raises RuntimeError with instructions when permissions are missing.
def check_trigger_permissions!(table)
  unless Grant.create_and_execute_trigger?(table)
    dbname = ApplicationRecord.database.database_name
    user = ApplicationRecord.database.username

    # Fixed: the suggested statement previously read "ALTER #{user} WITH
    # SUPERUSER", which is not valid SQL — PostgreSQL requires the
    # role to be named via ALTER USER (or ALTER ROLE).
    raise <<-EOF
Your database user is not allowed to create, drop, or execute triggers on the
table #{table}.

If you are using PostgreSQL you can solve this by logging in to the GitLab
database (#{dbname}) using a super user and running:

    ALTER USER #{user} WITH SUPERUSER

This query will grant the user super user permissions, ensuring you don't run
into similar problems in the future (e.g. when new tables are created).
    EOF
  end
end

#cleanup_concurrent_column_rename(table, old, new) ⇒ Object

Cleans up a concurrent column rename.

This method takes care of removing previously installed triggers as well as removing the old column.

table - The name of the database table. old - The name of the old column. new - The name of the new column.



558
559
560
561
562
563
564
565
566
# File 'lib/gitlab/database/migration_helpers.rb', line 558

# Cleans up after a concurrent column rename: drops the sync triggers
# installed for the rename and removes the old column.
#
# table - The name of the database table.
# old   - The name of the old column.
# new   - The name of the new column.
def cleanup_concurrent_column_rename(table, old, new)
  trigger = rename_trigger_name(table, old, new)

  check_trigger_permissions!(table)
  remove_rename_triggers(table, trigger)
  remove_column(table, old)
end

#cleanup_concurrent_column_type_change(table, column, temp_column: nil) ⇒ Object

Performs cleanup of a concurrent type change.

table - The table containing the column. column - The name of the column to change. new_type - The new column type.



477
478
479
480
481
482
483
484
485
486
# File 'lib/gitlab/database/migration_helpers.rb', line 477

# Finalizes a concurrent column type change: removes the original column
# (and its sync triggers) and renames the typed shadow column into place.
#
# table       - The table containing the column.
# column      - The name of the column whose type was changed.
# temp_column - Optional shadow column name; defaults to
#               "<column>_for_type_change".
def cleanup_concurrent_column_type_change(table, column, temp_column: nil)
  shadow = temp_column || "#{column}_for_type_change"

  # Both steps must happen atomically, otherwise readers could observe
  # inconsistent data between the drop and the rename.
  transaction do
    cleanup_concurrent_column_rename(table, column, shadow)
    rename_column(table, shadow, column)
  end
end

#cleanup_conversion_of_integer_to_bigint(table, columns) ⇒ Object

Similar to `revert_initialize_conversion_of_integer_to_bigint`, but `cleanup_conversion_of_integer_to_bigint` updates the yaml file



649
650
651
652
653
# File 'lib/gitlab/database/migration_helpers.rb', line 649

# Cleans up a finished integer-to-bigint conversion for the given table
# and columns, delegating to the BigintConverter.
def cleanup_conversion_of_integer_to_bigint(table, columns)
  converter = Gitlab::Database::Migrations::Conversions::BigintConverter.new(self, table, columns)
  converter.cleanup
end

#column_for(table, name) ⇒ Object

Returns the column for the given table and column name.



821
822
823
824
825
826
827
828
# File 'lib/gitlab/database/migration_helpers.rb', line 821

# Returns the column definition for the given table and column name.
#
# table - The table to look the column up on.
# name  - The column name (string or symbol).
#
# Raises when no column with that name exists on the table.
def column_for(table, name)
  wanted = name.to_s
  found = columns(table).find { |col| col.name == wanted }

  raise(missing_schema_object_message(table, "column", wanted)) unless found

  found
end

#convert_to_bigint_column(column) ⇒ Object



602
603
604
# File 'lib/gitlab/database/migration_helpers.rb', line 602

# Returns the conventional name of the bigint shadow column used during
# an integer-to-bigint conversion.
def convert_to_bigint_column(column)
  [column, 'convert_to_bigint'].join('_')
end

#convert_to_type_column(column, from_type, to_type) ⇒ Object



598
599
600
# File 'lib/gitlab/database/migration_helpers.rb', line 598

# Returns the conventional name of the shadow column used when converting
# a column between two arbitrary types.
def convert_to_type_column(column, from_type, to_type)
  format('%s_convert_%s_to_%s', column, from_type, to_type)
end

#copy_foreign_keys(table, old, new) ⇒ Object

Copies all foreign keys for the old column to the new column.

table - The table containing the columns and indexes. old - The old column. new - The new column.



811
812
813
814
815
816
817
818
# File 'lib/gitlab/database/migration_helpers.rb', line 811

# Copies all foreign keys defined on the old column to the new column,
# preserving each key's ON DELETE behavior.
#
# table - The table containing the columns.
# old   - The old column.
# new   - The new column.
def copy_foreign_keys(table, old, new)
  foreign_keys_for(table, old).each do |foreign_key|
    add_concurrent_foreign_key(
      foreign_key.from_table,
      foreign_key.to_table,
      column: new,
      on_delete: foreign_key.on_delete
    )
  end
end

#copy_indexes(table, old, new) ⇒ Object

Copies all indexes for the old column to a new column.

table - The table containing the columns and indexes. old - The old column. new - The new column.



759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
# File 'lib/gitlab/database/migration_helpers.rb', line 759

# Copies all indexes covering the old column to the new column, preserving
# uniqueness, lengths, orders, index method, partial-index WHERE clauses
# and operator classes.
#
# table - The table containing the columns and indexes.
# old   - The old column.
# new   - The new column.
#
# Raises when an index's name does not mention the old column, since the
# copied index's name is derived by substituting `old` with `new`.
def copy_indexes(table, old, new)
  old = old.to_s
  new = new.to_s

  indexes_for(table, old).each do |index|
    # Swap the old column for the new one, keeping all other columns.
    new_columns = index.columns.map do |column|
      column == old ? new : column
    end

    # This is necessary as we can't properly rename indexes such as
    # "ci_taggings_idx".
    unless index.name.include?(old)
      raise "The index #{index.name} can not be copied as it does not "\
        "mention the old column. You have to rename this index manually first."
    end

    name = index.name.gsub(old, new)

    # 63 is PostgreSQL's identifier length limit; fall back to a
    # digest-based name when the substituted name would exceed it.
    if name.length > 63
      digest = Digest::SHA256.hexdigest(name).first(10)
      name = "idx_copy_#{digest}"
    end

    options = {
      unique: index.unique,
      name: name,
      length: index.lengths,
      order: index.orders
    }

    options[:using] = index.using if index.using
    options[:where] = index.where if index.where

    unless index.opclasses.blank?
      opclasses = index.opclasses.dup

      # Copy the operator classes for the old column (if any) to the new
      # column.
      opclasses[new] = opclasses.delete(old) if opclasses[old]

      options[:opclass] = opclasses
    end

    add_concurrent_index(table, new_columns, options)
  end
end

#create_or_update_plan_limit(limit_name, plan_name, limit_value) ⇒ Object



895
896
897
898
899
900
901
902
903
904
905
906
907
# File 'lib/gitlab/database/migration_helpers.rb', line 895

# Inserts or updates a single limit value on the plan_limits row belonging
# to the named plan (upsert keyed on plan_id).
#
# limit_name  - The plan_limits column to set.
# plan_name   - The plan (plans.name) whose limit should change.
# limit_value - The value to set the limit to.
def create_or_update_plan_limit(limit_name, plan_name, limit_value)
  # Quote all caller-supplied values before interpolating them into SQL.
  limit_name_quoted = quote_column_name(limit_name)
  plan_name_quoted = quote(plan_name)
  limit_value_quoted = quote(limit_value)

  # NOTE(review): the wrapper suggests plans/plan_limits can live on
  # different databases, requiring this explicit allowance — see the
  # linked issue for context.
  ::Gitlab::Database.allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/519892') do
    execute <<~SQL
      INSERT INTO plan_limits (plan_id, #{limit_name_quoted})
      SELECT id, #{limit_value_quoted} FROM plans WHERE name = #{plan_name_quoted} LIMIT 1
      ON CONFLICT (plan_id) DO UPDATE SET #{limit_name_quoted} = EXCLUDED.#{limit_name_quoted};
    SQL
  end
end

#define_batchable_model(table_name, connection: self.connection, primary_key: nil, base_class: ActiveRecord::Base) ⇒ Object



40
41
42
43
44
45
46
47
# File 'lib/gitlab/database/migration_helpers.rb', line 40

# Defines a batchable model for the given table, delegating (via bare
# `super`) to the included DynamicModelHelpers implementation while
# defaulting the connection to the migration's own connection.
#
# table_name  - The table the model should be backed by.
# connection  - The database connection to use.
# primary_key - Optional primary key override.
# base_class  - The ActiveRecord base class to inherit from.
def define_batchable_model(
  table_name,
  connection: self.connection,
  primary_key: nil,
  base_class: ActiveRecord::Base
)
  super
end

#drop_sequence(table_name, column_name, sequence_name) ⇒ Object



937
938
939
940
941
942
# File 'lib/gitlab/database/migration_helpers.rb', line 937

# Detaches a sequence from a column's DEFAULT and drops the sequence.
#
# table_name    - The table owning the column.
# column_name   - The column whose DEFAULT should be removed.
# sequence_name - The sequence to drop (if it exists).
def drop_sequence(table_name, column_name, sequence_name)
  sql = <<~SQL
    ALTER TABLE #{quote_table_name(table_name)} ALTER COLUMN #{quote_column_name(column_name)} DROP DEFAULT;
    DROP SEQUENCE IF EXISTS #{quote_table_name(sequence_name)}
  SQL

  execute(sql)
end

#each_batch(table_name, connection: self.connection, **kwargs) ⇒ Object



49
50
51
# File 'lib/gitlab/database/migration_helpers.rb', line 49

# Delegates to the included DynamicModelHelpers batching implementation,
# defaulting the connection to the migration's own connection.
def each_batch(table_name, connection: self.connection, **kwargs)
  super
end

#each_batch_range(table_name, connection: self.connection, **kwargs) ⇒ Object



53
54
55
# File 'lib/gitlab/database/migration_helpers.rb', line 53

# Delegates to the included DynamicModelHelpers range-batching
# implementation, defaulting the connection to the migration's own.
def each_batch_range(table_name, connection: self.connection, **kwargs)
  super
end

#ensure_backfill_conversion_of_integer_to_bigint_is_finished(table, columns, primary_key: :id) ⇒ Object

Handy helper to ensure data finalization for bigint conversion process



723
724
725
726
727
# File 'lib/gitlab/database/migration_helpers.rb', line 723

# Ensures the background backfill for an integer-to-bigint conversion has
# completed before the migration proceeds.
def ensure_backfill_conversion_of_integer_to_bigint_is_finished(table, columns, primary_key: :id)
  converter = Gitlab::Database::Migrations::Conversions::BigintConverter.new(self, table, columns)
  converter.ensure_backfill(primary_key: primary_key)
end

#false_valueObject



246
247
248
# File 'lib/gitlab/database/migration_helpers.rb', line 246

# Returns the database's literal representation of false, delegating to
# Database.false_value.
def false_value
  Database.false_value
end

#foreign_keys_for(table, column) ⇒ Object

Returns an Array containing the foreign keys for the given column.



748
749
750
751
752
# File 'lib/gitlab/database/migration_helpers.rb', line 748

# Returns an Array of the foreign keys defined on the given column.
#
# table  - The table to inspect.
# column - The column name (string or symbol).
def foreign_keys_for(table, column)
  target = column.to_s

  foreign_keys(table).select { |foreign_key| foreign_key.column == target }
end

#index_exists_by_name?(table, index) ⇒ Boolean

Fetches indexes on a column by name for postgres.

This will include indexes using an expression on the column, for example: ‘CREATE INDEX CONCURRENTLY index_name ON table (LOWER(column));`

We can remove this when upgrading to Rails 5 with an updated ‘index_exists?`:

Or this can be removed when we no longer support postgres < 9.5, so we can use ‘CREATE INDEX IF NOT EXISTS`.

Returns:

  • (Boolean)


873
874
875
876
877
878
879
880
881
# File 'lib/gitlab/database/migration_helpers.rb', line 873

# Returns true when an index with the given name exists on the table,
# including expression indexes that Rails' `index_exists?` cannot find
# without a column list.
def index_exists_by_name?(table, index)
  # `index_exists?` is not usable here because it cannot look an index up
  # purely by name.
  return true if indexes(table).any? { |definition| definition.name == index.to_s }

  postgres_exists_by_name?(table, index)
end

#index_invalid?(index_name, schema: nil) ⇒ Boolean

Returns:

  • (Boolean)


151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
# File 'lib/gitlab/database/migration_helpers.rb', line 151

# Returns true when the named index exists but is flagged invalid in
# pg_index (`indisvalid = false`), which can happen when a concurrent
# index build was aborted.
#
# index_name - The name of the index to check.
# schema     - Optional schema name; defaults to current_schema().
#
# Returns the boolean result of `NOT indisvalid`, or nil when no matching
# index row exists.
def index_invalid?(index_name, schema: nil)
  # Quote caller-supplied identifiers before interpolating into SQL.
  index_name = connection.quote(index_name)
  schema = connection.quote(schema) if schema
  schema ||= 'current_schema()'

  connection.select_value(<<~SQL)
    select not i.indisvalid
    from pg_class c
    inner join pg_index i
      on c.oid = i.indexrelid
    inner join pg_namespace n
      on n.oid = c.relnamespace
    where n.nspname = #{schema}
      and c.relname = #{index_name}
  SQL
end

#indexes_for(table, column) ⇒ Object

Returns an Array containing the indexes for the given column



741
742
743
744
745
# File 'lib/gitlab/database/migration_helpers.rb', line 741

# Returns an Array of the indexes whose column list includes the given
# column.
#
# table  - The table to inspect.
# column - The column name (string or symbol).
def indexes_for(table, column)
  target = column.to_s

  indexes(table).select { |index| index.columns.include?(target) }
end

#initialize_conversion_of_integer_to_bigint(table, columns, primary_key: :id) ⇒ Object

Initializes the conversion of a set of integer columns to bigint

It can be used for converting both a Primary Key and any Foreign Keys that may reference it or any other integer column that we may want to upgrade (e.g. columns that store IDs, but are not set as FKs).

  • For primary keys and Foreign Keys (or other columns) defined as NOT NULL,

    the new bigint column is added with a hardcoded NOT NULL DEFAULT 0
    which allows us to skip a very costly verification step once we
    are ready to switch it.
    

    This is crucial for Primary Key conversions, because setting a column

    as the PK converts even check constraints to NOT NULL constraints
    and forces an inline re-verification of the whole table.
    
  • It sets up a trigger to keep the two columns in sync.

    Note: this helper is intended to be used in a regular (pre-deployment) migration.

    This helper is part 1 of a multi-step migration process:

    1. initialize_conversion_of_integer_to_bigint to create the new columns and database trigger

    2. backfill_conversion_of_integer_to_bigint to copy historic data using background migrations

    3. remaining steps TBD, see #288005

table - The name of the database table containing the column columns - The name, or array of names, of the column(s) that we want to convert to bigint. primary_key - The name of the primary key column (most often :id)



631
632
633
634
635
# File 'lib/gitlab/database/migration_helpers.rb', line 631

# Initializes the conversion of integer columns to bigint: creates the
# shadow columns and the trigger that keeps them in sync. Part 1 of the
# multi-step conversion process (see backfill_conversion_of_integer_to_bigint).
def initialize_conversion_of_integer_to_bigint(table, columns, primary_key: :id) # rubocop:disable Lint/UnusedMethodArgument -- for backward compatibility, don't remove primary_key
  converter = Gitlab::Database::Migrations::Conversions::BigintConverter.new(self, table, columns)
  converter.init
end

#install_rename_triggers(table, old, new, trigger_name: nil) ⇒ Object

Installs triggers in a table that keep a new column in sync with an old one.

table - The name of the table to install the trigger in. old_column - The name of the old column. new_column - The name of the new column. trigger_name - The name of the trigger to use (optional).



415
416
417
# File 'lib/gitlab/database/migration_helpers.rb', line 415

# Installs triggers on a table that keep a new column in sync with an old
# one.
#
# table        - The name of the table to install the trigger in.
# old          - The name of the old column.
# new          - The name of the new column.
# trigger_name - The name of the trigger to use (optional).
def install_rename_triggers(table, old, new, trigger_name: nil)
  copy_trigger = Gitlab::Database::UnidirectionalCopyTrigger.on_table(table, connection: connection)
  copy_trigger.create(old, new, trigger_name: trigger_name)
end

#install_sharding_key_assignment_trigger(**args) ⇒ Object

Installs a trigger in a table that assigns a sharding key from an associated table.

table: The table to install the trigger in. sharding_key: The column to be assigned on ‘table`. parent_table: The associated table with the sharding key to be copied. parent_sharding_key: The sharding key on the parent table that will be copied to `sharding_key` on `table`. foreign_key: The column used to fetch the relevant record from `parent_table`.



436
437
438
# File 'lib/gitlab/database/migration_helpers.rb', line 436

# Installs a trigger that assigns a sharding key copied from an associated
# parent table; all keyword arguments are forwarded to
# Gitlab::Database::Triggers::AssignDesiredShardingKey.
def install_sharding_key_assignment_trigger(**args)
  trigger_args = args.merge(connection: connection)
  Gitlab::Database::Triggers::AssignDesiredShardingKey.new(**trigger_args).create
end

#lock_tables(*tables, mode: :access_exclusive, only: nil, nowait: nil) ⇒ Object



964
965
966
967
968
969
970
971
972
973
# File 'lib/gitlab/database/migration_helpers.rb', line 964

# Acquires an explicit table-level lock on the given tables.
#
# tables - One or more table names to lock.
# mode   - The lock mode as a symbol, e.g. :access_exclusive → "ACCESS
#          EXCLUSIVE".
# only   - When truthy, locks ONLY the named tables (not partitions /
#          descendants).
# nowait - When truthy, adds NOWAIT so the statement errors instead of
#          waiting on a conflicting lock.
def lock_tables(*tables, mode: :access_exclusive, only: nil, nowait: nil)
  # Explicit ternaries: the previous `only && 'ONLY'` interpolated the
  # literal string "false" into the SQL when a caller passed `only: false`
  # (same for `nowait: false`).
  only_param = only ? 'ONLY' : nil
  nowait_param = nowait ? 'NOWAIT' : nil
  tables_param = tables.map { |t| quote_table_name(t) }.join(', ')
  mode_param = mode.to_s.upcase.tr('_', ' ')

  # `squish` collapses the gaps left by the nil clauses.
  execute(<<~SQL.squish)
    LOCK TABLE #{only_param} #{tables_param} IN #{mode_param} MODE #{nowait_param}
  SQL
end

#postgres_exists_by_name?(table, name) ⇒ Boolean

Returns:

  • (Boolean)


883
884
885
886
887
888
889
890
891
892
893
# File 'lib/gitlab/database/migration_helpers.rb', line 883

# Returns true when pg_catalog.pg_indexes contains an index with the given
# name on the given table in the current schema.
def postgres_exists_by_name?(table, name)
  query = <<~SQL
    SELECT COUNT(*)
    FROM pg_catalog.pg_indexes
    WHERE schemaname = #{connection.quote(current_schema)}
      AND tablename = #{connection.quote(table)}
      AND indexname = #{connection.quote(name)}
  SQL

  connection.select_value(query).to_i.positive?
end

#remove_column_default(table_name, column_name) ⇒ Object

While it is safe to call `change_column_default` on a column without a default, it would still require an ACCESS EXCLUSIVE lock on the table, and for tables with high autovacuum (wraparound prevention) activity it will fail if their executions overlap.



956
957
958
959
960
961
962
# File 'lib/gitlab/database/migration_helpers.rb', line 956

# Removes a column's default, but only when one is actually set, so that
# no ACCESS EXCLUSIVE lock is taken for a no-op (important for tables with
# frequent wraparound-prevention autovacuums).
def remove_column_default(table_name, column_name)
  target = connection.columns(table_name).find { |col| col.name == column_name.to_s }

  return unless target.default || target.default_function

  change_column_default(table_name, column_name, to: nil)
end

#remove_concurrent_index(table_name, column_name, options = {}) ⇒ Object

Removes an existing index, concurrently

Example:

remove_concurrent_index :users, :some_column

See Rails' `remove_index` for more info on the available arguments.



175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
# File 'lib/gitlab/database/migration_helpers.rb', line 175

# Removes an index on (table_name, column_name) without blocking writes.
#
# Must run outside a transaction (call disable_ddl_transaction!) and cannot
# be used on partitioned tables — use
# remove_concurrent_partitioned_index_by_name for those.
#
# See Rails' `remove_index` for the supported options.
def remove_concurrent_index(table_name, column_name, options = {})
  if transaction_open?
    raise 'remove_concurrent_index can not be run inside a transaction, ' \
      'you can disable transactions by calling disable_ddl_transaction! ' \
      'in the body of your migration class'
  end

  if partition?(table_name)
    raise ArgumentError, 'remove_concurrent_index can not be used on a partitioned '  \
      'table. Please use remove_concurrent_partitioned_index_by_name on the partitioned table ' \
      'as we need to remove the index on the parent table'
  end

  concurrent_options = options.merge(algorithm: :concurrently)

  if index_exists?(table_name, column_name, **concurrent_options)
    disable_statement_timeout do
      remove_index(table_name, **concurrent_options.merge(column: column_name))
    end

    # The index is gone; also drop any pending async creation for it.
    unprepare_async_index(table_name, column_name, **concurrent_options)
  else
    Gitlab::AppLogger.warn "Index not removed because it does not exist (this may be due to an aborted migration or similar): table_name: #{table_name}, column_name: #{column_name}"
  end
end

#remove_concurrent_index_by_name(table_name, index_name, options = {}) ⇒ Object

Removes an existing index, concurrently

Example:

remove_concurrent_index_by_name :users, "index_X_by_Y"

See Rails' `remove_index` for more info on the available arguments.



210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
# File 'lib/gitlab/database/migration_helpers.rb', line 210

# Removes an index by name without blocking writes.
#
# Must run outside a transaction (call disable_ddl_transaction!) and cannot
# be used on partitioned tables — use
# remove_concurrent_partitioned_index_by_name for those. `index_name` may
# also be a Hash with a :name key (Rails-style options).
#
# See Rails' `remove_index` for the supported options.
def remove_concurrent_index_by_name(table_name, index_name, options = {})
  if transaction_open?
    raise 'remove_concurrent_index_by_name can not be run inside a transaction, ' \
      'you can disable transactions by calling disable_ddl_transaction! ' \
      'in the body of your migration class'
  end

  if partition?(table_name)
    raise ArgumentError, 'remove_concurrent_index_by_name can not be used on a partitioned '  \
      'table. Please use remove_concurrent_partitioned_index_by_name on the partitioned table ' \
      'as we need to remove the index on the parent table'
  end

  index_name = index_name[:name] if index_name.is_a?(Hash)

  raise 'remove_concurrent_index_by_name must get an index name as the second argument' if index_name.blank?

  concurrent_options = options.merge(algorithm: :concurrently)

  if index_exists_by_name?(table_name, index_name)
    disable_statement_timeout do
      remove_index(table_name, **concurrent_options.merge(name: index_name))
    end

    # The index is gone; also drop any pending async creation for it.
    unprepare_async_index_by_name(table_name, index_name, **concurrent_options)
  else
    Gitlab::AppLogger.warn "Index not removed because it does not exist (this may be due to an aborted migration or similar): table_name: #{table_name}, index_name: #{index_name}"
  end
end

#remove_rename_triggers(table, trigger) ⇒ Object

Removes the triggers used for renaming a column concurrently.



420
421
422
# File 'lib/gitlab/database/migration_helpers.rb', line 420

# Removes the triggers previously installed for renaming a column
# concurrently.
def remove_rename_triggers(table, trigger)
  copy_trigger = Gitlab::Database::UnidirectionalCopyTrigger.on_table(table, connection: connection)
  copy_trigger.drop(trigger)
end

#remove_sharding_key_assignment_trigger(**args) ⇒ Object

Removes trigger used for assigning sharding keys.

table: The table to install the trigger in. sharding_key: The column to be assigned on ‘table`. parent_table: The associated table with the sharding key to be copied. parent_sharding_key: The sharding key on the parent table that will be copied to `sharding_key` on `table`. foreign_key: The column used to fetch the relevant record from `parent_table`.



447
448
449
# File 'lib/gitlab/database/migration_helpers.rb', line 447

# Removes the trigger used for assigning sharding keys.
#
# Accepts the same keyword arguments that were passed when installing the
# trigger (e.g. table, sharding_key, parent_table, parent_sharding_key,
# foreign_key); the current connection is merged in automatically.
def remove_sharding_key_assignment_trigger(**args)
  trigger_args = args.merge(connection: connection)
  Gitlab::Database::Triggers::AssignDesiredShardingKey.new(**trigger_args).drop
end

#remove_timestamps(table_name, options = {}) ⇒ Object

To be used in the ‘#down` method of migrations that use `#add_timestamps_with_timezone`.

Available options are:

:columns - the column names to remove. Must be one or more column names.
           Default value: `DEFAULT_TIMESTAMP_COLUMNS`

All options are optional.



99
100
101
102
103
104
# File 'lib/gitlab/database/migration_helpers.rb', line 99

# Removes timestamp columns; the counterpart of
# `#add_timestamps_with_timezone` for use in `#down` migrations.
#
# Options:
#   :columns - the column names to remove (defaults to
#              DEFAULT_TIMESTAMP_COLUMNS).
def remove_timestamps(table_name, options = {})
  options.fetch(:columns, DEFAULT_TIMESTAMP_COLUMNS).each do |column|
    remove_column(table_name, column)
  end
end

#rename_column_concurrently(table, old, new, type: nil, type_cast_function: nil, batch_column_name: :id) ⇒ Object

Renames a column without requiring downtime.

Concurrent renames work by using database triggers to ensure both the old and new column are in sync. However, this method will not remove the triggers or the old column automatically; this needs to be done manually in a post-deployment migration. This can be done using the method ‘cleanup_concurrent_column_rename`.

table - The name of the database table containing the column. old - The old column name. new - The new column name. type - The type of the new column. If no type is given the old column’s

type is used.

batch_column_name - option is for tables without primary key, in this

case another unique integer column can be used. Example: :user_id


374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
# File 'lib/gitlab/database/migration_helpers.rb', line 374

# Renames a column without requiring downtime: creates the new column as a
# copy of the old one and installs triggers that keep both in sync.
#
# The triggers and the old column are NOT removed here; do that later with
# `cleanup_concurrent_column_rename`. `batch_column_name` can point at an
# alternative unique integer column for tables without a primary key.
def rename_column_concurrently(table, old, new, type: nil, type_cast_function: nil, batch_column_name: :id)
  raise "Column #{batch_column_name} does not exist on #{table}" unless column_exists?(table, batch_column_name)
  raise 'rename_column_concurrently can not be run inside a transaction' if transaction_open?

  check_trigger_permissions!(table)

  create_column_from(
    table, old, new,
    type: type,
    batch_column_name: batch_column_name,
    type_cast_function: type_cast_function
  )

  install_rename_triggers(table, old, new)
end

#rename_trigger_name(table, old, new) ⇒ Object

Returns the (base) name to use for triggers when renaming columns.



425
426
427
# File 'lib/gitlab/database/migration_helpers.rb', line 425

# Returns the (base) trigger name used when renaming column `old` to `new`
# on `table`.
def rename_trigger_name(table, old, new)
  copy_trigger = Gitlab::Database::UnidirectionalCopyTrigger.on_table(table, connection: connection)
  copy_trigger.name(old, new)
end

#replace_sql(column, pattern, replacement) ⇒ Object

This will replace the first occurrence of a string in a column with the replacement using ‘regexp_replace`



832
833
834
835
836
837
838
839
840
841
# File 'lib/gitlab/database/migration_helpers.rb', line 832

# Builds an Arel SQL literal that replaces the first occurrence of
# `pattern` in `column` with `replacement`, via `regexp_replace`.
def replace_sql(column, pattern, replacement)
  arguments = [
    column,
    Arel::Nodes::Quoted.new(pattern.to_s),
    Arel::Nodes::Quoted.new(replacement.to_s)
  ]

  replace_call = Arel::Nodes::NamedFunction.new("regexp_replace", arguments)

  Arel::Nodes::SqlLiteral.new(replace_call.to_sql)
end

#restore_conversion_of_integer_to_bigint(table, columns, primary_key: :id) ⇒ Object

Reverts ‘cleanup_conversion_of_integer_to_bigint`

table - The name of the database table containing the columns columns - The name, or array of names, of the column(s) that we have converted to bigint. primary_key - The name of the primary key column (most often :id)



660
661
662
663
664
# File 'lib/gitlab/database/migration_helpers.rb', line 660

# Reverts `cleanup_conversion_of_integer_to_bigint`.
#
# table       - The table containing the converted column(s).
# columns     - The column name(s) that were converted to bigint.
# primary_key - Unused; kept so existing callers keep working.
def restore_conversion_of_integer_to_bigint(table, columns, primary_key: :id) # rubocop:disable Lint/UnusedMethodArgument -- for backward compatibility, don't remove primary_key
  converter = Gitlab::Database::Migrations::Conversions::BigintConverter.new(self, table, columns)
  converter.restore_cleanup
end

#revert_backfill_conversion_of_integer_to_bigint(table, columns, primary_key: :id) ⇒ Object

Reverts ‘backfill_conversion_of_integer_to_bigint`

table - The name of the database table containing the column columns - The name, or an array of names, of the column(s) we want to convert to bigint. primary_key - The name of the primary key column (most often :id)



734
735
736
737
738
# File 'lib/gitlab/database/migration_helpers.rb', line 734

# Reverts `backfill_conversion_of_integer_to_bigint`.
#
# table       - The table containing the column(s).
# columns     - The column name(s) being converted to bigint.
# primary_key - The primary key column (most often :id).
def revert_backfill_conversion_of_integer_to_bigint(table, columns, primary_key: :id)
  converter = Gitlab::Database::Migrations::Conversions::BigintConverter.new(self, table, columns)
  converter.revert_backfill(primary_key: primary_key)
end

#revert_initialize_conversion_of_integer_to_bigint(table, columns) ⇒ Object

Reverts ‘initialize_conversion_of_integer_to_bigint`

table - The name of the database table containing the columns columns - The name, or array of names, of the column(s) that we’re converting to bigint.



641
642
643
644
645
# File 'lib/gitlab/database/migration_helpers.rb', line 641

# Reverts `initialize_conversion_of_integer_to_bigint`.
#
# table   - The table containing the column(s).
# columns - The column name(s) being converted to bigint.
def revert_initialize_conversion_of_integer_to_bigint(table, columns)
  converter = Gitlab::Database::Migrations::Conversions::BigintConverter.new(self, table, columns)
  converter.revert_init
end

#swap_primary_key(table_name, primary_key_name, index_to_use) ⇒ Object Also known as: unswap_primary_key



929
930
931
932
933
934
# File 'lib/gitlab/database/migration_helpers.rb', line 929

# Swaps the primary key of `table_name` to be backed by `index_to_use`.
#
# Drops the existing primary key constraint (with CASCADE, so objects that
# depend on it are dropped as well — presumably FKs referencing it; verify
# before use) and recreates the constraint from the given unique index.
# Runs under lock retries and raises when they are exhausted.
# Also aliased as `unswap_primary_key`.
def swap_primary_key(table_name, primary_key_name, index_to_use)
  with_lock_retries(raise_on_exhaustion: true) do
    drop_constraint(table_name, primary_key_name, cascade: true)
    add_primary_key_using_index(table_name, primary_key_name, index_to_use)
  end
end

#table_integer_idsObject



975
976
977
# File 'lib/gitlab/database/migration_helpers.rb', line 975

# Loads the YAML manifest of integer ID columns that have not yet been
# initialized for conversion to bigint.
#
# Returns the parsed YAML contents.
def table_integer_ids
  # File.join with a single argument is a no-op, so pass the path directly.
  YAML.safe_load_file(INTEGER_IDS_YET_TO_INITIALIZED_TO_BIGINT_FILE_PATH)
end

#true_valueObject



242
243
244
# File 'lib/gitlab/database/migration_helpers.rb', line 242

# Returns the database literal for boolean true, delegating to
# Database.true_value.
def true_value
  Database.true_value
end

#undo_change_column_type_concurrently(table, column) ⇒ Object

Reverses operations performed by change_column_type_concurrently.

table - The table containing the column. column - The name of the column to change.



466
467
468
469
470
# File 'lib/gitlab/database/migration_helpers.rb', line 466

# Reverses the operations performed by change_column_type_concurrently by
# undoing the rename of the temporary "<column>_for_type_change" column.
#
# table  - The table containing the column.
# column - The name of the column to change.
def undo_change_column_type_concurrently(table, column)
  undo_rename_column_concurrently(table, column, "#{column}_for_type_change")
end

#undo_cleanup_concurrent_column_rename(table, old, new, type: nil, batch_column_name: :id) ⇒ Object

Reverses the operations performed by cleanup_concurrent_column_rename.

This method adds back the old_column removed by cleanup_concurrent_column_rename. It also adds back the (old_column > new_column) trigger that is removed by cleanup_concurrent_column_rename.

table - The name of the database table containing the column. old - The old column name. new - The new column name. type - The type of the old column. If no type is given the new column’s

type is used.

batch_column_name - option is for tables without primary key, in this

case another unique integer column can be used. Example: :user_id


582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
# File 'lib/gitlab/database/migration_helpers.rb', line 582

# Reverses cleanup_concurrent_column_rename: re-creates the removed old
# column as a copy of `new` and re-installs the (old -> new) rename
# trigger.
#
# Cannot run inside a transaction. `batch_column_name` can point at an
# alternative unique integer column for tables without a primary key.
def undo_cleanup_concurrent_column_rename(table, old, new, type: nil, batch_column_name: :id)
  raise "Column #{batch_column_name} does not exist on #{table}" unless column_exists?(table, batch_column_name)
  raise 'undo_cleanup_concurrent_column_rename can not be run inside a transaction' if transaction_open?

  check_trigger_permissions!(table)

  create_column_from(table, new, old, type: type, batch_column_name: batch_column_name)

  install_rename_triggers(table, old, new)
end

#undo_cleanup_concurrent_column_type_change(table, column, old_type, type_cast_function: nil, batch_column_name: :id, limit: nil, temp_column: nil) ⇒ Object

Reverses operations performed by cleanup_concurrent_column_type_change.

table - The table containing the column. column - The name of the column to change. old_type - The type of the original column used with change_column_type_concurrently. type_cast_function - Required if the conversion back to the original type is not automatic batch_column_name - option for tables without a primary key, in this case

another unique integer column can be used. Example: :user_id


496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
# File 'lib/gitlab/database/migration_helpers.rb', line 496

# Reverses operations performed by cleanup_concurrent_column_type_change.
#
# Re-creates a column with the original type as a copy of `column`, then —
# inside a transaction — swaps it into the "<column>_for_type_change" slot
# and re-installs the rename triggers. On any failure the freshly created
# column is dropped again so the schema stays consistent.
#
# table    - The table containing the column.
# column   - The name of the column to change.
# old_type - The type of the original column used with
#            change_column_type_concurrently.
# type_cast_function - Required if the conversion back to the original
#            type is not automatic.
# batch_column_name  - Alternative unique integer column for tables
#            without a primary key (e.g. :user_id).
def undo_cleanup_concurrent_column_type_change(table, column, old_type, type_cast_function: nil, batch_column_name: :id, limit: nil, temp_column: nil)
  Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas.require_ddl_mode!

  temp_column ||= "#{column}_for_type_change"

  # Using a descriptive name that includes the original column's name risks
  # taking us above the 63 character limit, so we use a hash
  identifier = "#{table}_#{column}_for_type_change"
  hashed_identifier = Digest::SHA256.hexdigest(identifier).first(10)
  temp_undo_cleanup_column = "tmp_undo_cleanup_column_#{hashed_identifier}"

  unless column_exists?(table, batch_column_name)
    raise "Column #{batch_column_name} does not exist on #{table}"
  end

  if transaction_open?
    raise 'undo_cleanup_concurrent_column_type_change can not be run inside a transaction'
  end

  check_trigger_permissions!(table)

  begin
    create_column_from(
      table,
      column,
      temp_undo_cleanup_column,
      type: old_type,
      batch_column_name: batch_column_name,
      type_cast_function: type_cast_function,
      limit: limit
    )

    transaction do
      # This has to be performed in a transaction as otherwise we might
      # have inconsistent data.
      rename_column(table, column, temp_column)
      rename_column(table, temp_undo_cleanup_column, column)

      install_rename_triggers(table, column, temp_column)
    end
  rescue StandardError
    # create_column_from can not run inside a transaction, which means
    #  that there is a risk that if any of the operations that follow it
    #  fail, we'll be left with an inconsistent schema
    # For those reasons, we make sure that we drop temp_undo_cleanup_column
    #  if an error is caught
    if column_exists?(table, temp_undo_cleanup_column)
      remove_column(table, temp_undo_cleanup_column)
    end

    raise
  end
end

#undo_rename_column_concurrently(table, old, new) ⇒ Object

Reverses operations performed by rename_column_concurrently.

This method takes care of removing previously installed triggers as well as removing the new column.

table - The name of the database table. old - The name of the old column. new - The name of the new column.



398
399
400
401
402
403
404
405
406
# File 'lib/gitlab/database/migration_helpers.rb', line 398

# Reverses operations performed by rename_column_concurrently: removes the
# previously installed rename triggers and then drops the new column.
#
# table - The name of the database table.
# old   - The name of the old column.
# new   - The name of the new column.
def undo_rename_column_concurrently(table, old, new)
  trigger = rename_trigger_name(table, old, new)

  check_trigger_permissions!(table)
  remove_rename_triggers(table, trigger)
  remove_column(table, new)
end

#update_column_in_batches(table_name, column, value, batch_size: nil, batch_column_name: :id, disable_lock_writes: false) ⇒ Object

Updates the value of a column in batches.

This method updates the table in batches of 5% of the total row count. A ‘batch_size` option can also be passed to set this to a fixed number. This method will continue updating rows until no rows remain.

When given a block this method will yield two values to the block:

  1. An instance of ‘Arel::Table` for the table that is being updated.

  2. The query to run as an Arel object.

By supplying a block one can add extra conditions to the queries being executed. Note that the same block is used for all queries.

Example:

update_column_in_batches(:projects, :foo, 10) do |table, query|
  query.where(table[:some_column].eq('hello'))
end

This would result in this method updating only rows where ‘projects.some_column` equals “hello”.

table - The name of the table. column - The name of the column to update. value - The value for the column.

The ‘value` argument is typically a literal. To perform a computed update, an Arel literal can be used instead:

update_value = Arel.sql('bar * baz')

update_column_in_batches(:projects, :foo, update_value) do |table, query|
  query.where(table[:some_column].eq('hello'))
end

Rubocop’s Metrics/AbcSize metric is disabled for this method as Rubocop determines this method to be too complex while there’s no way to make it less “complex” without introducing extra methods (which actually will make things more complex).

‘batch_column_name` option is for tables without primary key, in this case another unique integer column can be used. Example: :user_id

rubocop: disable Metrics/AbcSize



295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
# File 'lib/gitlab/database/migration_helpers.rb', line 295

# Updates the value of a column in batches.
#
# Batches are 5% of the total row count, capped at 1000 rows, unless a
# fixed `batch_size` is given. When a block is given it is yielded
# (arel_table, query) for every query built here — the COUNT, the batch
# boundary lookups and each UPDATE — so extra conditions can be added.
#
# table_name - The name of the table.
# column     - The name of the column to update.
# value      - The value for the column (a literal, or an Arel SQL literal
#              for computed updates).
# batch_column_name   - Alternative unique integer column for tables
#              without a primary key (e.g. :user_id).
# disable_lock_writes - When true, disables the lock_writes trigger for
#              the table inside each update's transaction.
def update_column_in_batches(table_name, column, value, batch_size: nil, batch_column_name: :id, disable_lock_writes: false)
  if transaction_open?
    raise 'update_column_in_batches can not be run inside a transaction, ' \
      'you can disable transactions by calling disable_ddl_transaction! ' \
      'in the body of your migration class'
  end

  table = Arel::Table.new(table_name)

  count_arel = table.project(Arel.star.count.as('count'))
  count_arel = yield table, count_arel if block_given?

  total = exec_query(count_arel.to_sql).to_a.first['count'].to_i

  # Nothing to update.
  return if total == 0

  if batch_size.nil?
    # Update in batches of 5% until we run out of any rows to update.
    batch_size = ((total / 100.0) * 5.0).ceil
    max_size = 1000

    # The upper limit is 1000 to ensure we don't lock too many rows. For
    # example, for "merge_requests" even 1% of the table is around 35 000
    # rows for GitLab.com.
    batch_size = max_size if batch_size > max_size
  end

  # Seed the loop with the smallest value of the batch column.
  start_arel = table.project(table[batch_column_name]).order(table[batch_column_name].asc).take(1)
  start_arel = yield table, start_arel if block_given?
  start_id = exec_query(start_arel.to_sql).to_a.first[batch_column_name.to_s].to_i

  loop do
    # Exclusive upper bound of the batch: the row `batch_size` positions
    # after `start_id`; nil on the final (open-ended) batch.
    stop_arel = table.project(table[batch_column_name])
      .where(table[batch_column_name].gteq(start_id))
      .order(table[batch_column_name].asc)
      .take(1)
      .skip(batch_size)

    stop_arel = yield table, stop_arel if block_given?
    stop_row = exec_query(stop_arel.to_sql).to_a.first

    update_arel = Arel::UpdateManager.new
      .table(table)
      .set([[table[column], value]])
      .where(table[batch_column_name].gteq(start_id))

    if stop_row
      stop_id = stop_row[batch_column_name.to_s].to_i
      start_id = stop_id
      update_arel = update_arel.where(table[batch_column_name].lt(stop_id))
    end

    update_arel = yield table, update_arel if block_given?

    transaction do
      execute("SELECT set_config('lock_writes.#{table_name}', 'false', true)") if disable_lock_writes
      execute(update_arel.to_sql)
    end

    # There are no more rows left to update.
    break unless stop_row
  end
end