/usr/lib/ruby/vendor_ruby/sequel/extensions/schema_dumper.rb is in ruby-sequel 3.36.1-1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

# The schema_dumper extension supports dumping tables and indexes
# in a Sequel::Migration format, so they can be restored on another
# database (which can be the same type or a different type than
# the current database).  The main interface is through
# Sequel::Database#dump_schema_migration.
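#
# A minimal usage sketch (assumes a Sequel::Database object named DB and that
# this extension has been loaded, e.g. via Sequel.extension :schema_dumper):
#
#   puts DB.dump_schema_migration                 # translate types to ruby classes
#   puts DB.dump_schema_migration(:same_db=>true) # keep database-specific types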

module Sequel
  class Database
    # Dump foreign key constraints for all tables as a migration. This complements
    # the :foreign_keys=>false option to dump_schema_migration. This only dumps
    # the constraints (not the columns) using alter_table/add_foreign_key with an
    # array of columns.
    #
    # Note that the migration this produces does not have a down
    # block, so you cannot reverse it.
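    #
    # Usage sketch (assumes a Database object named DB):
    #
    #   puts DB.dump_foreign_key_migration
    #   # Prints a migration whose up block contains an alter_table/add_foreign_key
    #   # call for each table that has foreign keys.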
    def dump_foreign_key_migration(options={})
      ts = tables(options)
      <<END_MIG
Sequel.migration do
  up do
#{ts.sort_by{|t| t.to_s}.map{|t| dump_table_foreign_keys(t)}.reject{|x| x == ''}.join("\n\n").gsub(/^/o, '    ')}
  end
end
END_MIG
    end

    # Dump indexes for all tables as a migration.  This complements
    # the :indexes=>false option to dump_schema_migration. Options:
    # * :same_db - Create a dump for the same database type, so
    #   don't ignore errors if the index statements fail.
    # * :index_names - If set to false, don't record names of indexes. If
    #   set to :namespace, prepend the table name to the index name if the
    #   database does not use a global index namespace.
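    #
    # Usage sketch (assumes a Database object named DB):
    #
    #   puts DB.dump_indexes_migration(:index_names=>:namespace)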
    def dump_indexes_migration(options={})
      ts = tables(options)
      <<END_MIG
Sequel.migration do
  up do
#{ts.sort_by{|t| t.to_s}.map{|t| dump_table_indexes(t, :add_index, options)}.reject{|x| x == ''}.join("\n\n").gsub(/^/o, '    ')}
  end
  
  down do
#{ts.sort_by{|t| t.to_s}.reverse.map{|t| dump_table_indexes(t, :drop_index, options)}.reject{|x| x == ''}.join("\n\n").gsub(/^/o, '    ')}
  end
end
END_MIG
    end

    # Return a string that contains a Sequel::Migration subclass that when
    # run would recreate the database structure. Options:
    # * :same_db - Don't attempt to translate database types to ruby types.
    #   If this isn't set to true, all database types will be translated to
    #   ruby types, but there is no guarantee that the migration generated
    #   will yield the same type.  Without this set, types that aren't
    #   recognized will be translated to a string-like type.
    # * :foreign_keys - If set to false, don't dump foreign_keys
    # * :indexes - If set to false, don't dump indexes (they can be added
    #   later via dump_indexes_migration).
    # * :index_names - If set to false, don't record names of indexes. If
    #   set to :namespace, prepend the table name to the index name.
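    #
    # Usage sketch (assumes a Database object named DB; the filename is arbitrary):
    #
    #   migration = DB.dump_schema_migration(:same_db=>true)
    #   File.open('001_schema.rb', 'w'){|f| f.write(migration)}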
    def dump_schema_migration(options={})
      options = options.dup
      if options[:indexes] == false && !options.has_key?(:foreign_keys)
        # Unless foreign_keys option is specifically set, disable if indexes
        # are disabled, as foreign keys that point to non-primary keys rely
        # on unique indexes being created first
        options[:foreign_keys] = false
      end

      ts = sort_dumped_tables(tables(options), options)
      skipped_fks = if sfk = options[:skipped_foreign_keys]
        # Handle skipped foreign keys by adding them at the end via
        # alter_table/add_foreign_key.  Note that skipped foreign keys
        # probably result in a migration whose down block does not work.
        sfka = sfk.sort_by{|table, fks| table.to_s}.map{|table, fks| dump_add_fk_constraints(table, fks.values)}
        sfka.join("\n\n").gsub(/^/o, '    ') unless sfka.empty?
      end

      <<END_MIG
Sequel.migration do
  up do
#{ts.map{|t| dump_table_schema(t, options)}.join("\n\n").gsub(/^/o, '    ')}#{"\n    \n" if skipped_fks}#{skipped_fks}
  end
  
  down do
    drop_table(#{ts.reverse.inspect[1...-1]})
  end
end
END_MIG
    end

    # Return a string with a create table block that will recreate the given
    # table's schema.  Takes the same options as dump_schema_migration.
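    #
    # Example (hypothetical :artists table with an integer primary key :id and a
    # varchar(255) name column; the exact output depends on the database), roughly:
    #
    #   DB.dump_table_schema(:artists)
    #   # => "create_table(:artists) do\n  primary_key :id\n  String :name, :size=>255\nend"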
    def dump_table_schema(table, options={})
      table = table.value.to_s if table.is_a?(SQL::Identifier)
      gen = dump_table_generator(table, options)
      commands = [gen.dump_columns, gen.dump_constraints, gen.dump_indexes].reject{|x| x == ''}.join("\n\n")
      "create_table(#{table.inspect}#{', :ignore_index_errors=>true' if !options[:same_db] && options[:indexes] != false && !gen.indexes.empty?}) do\n#{commands.gsub(/^/o, '  ')}\nend"
    end

    private
        
    # If the column's database default can't be converted to a ruby value, return
    # a copy of the default string whose inspect output has .lit appended, so the
    # default is dumped as a literal SQL fragment.  Only used if the :same_db
    # option is given.
    def column_schema_to_ruby_default_fallback(default, options)
      if default.is_a?(String) && options[:same_db] && use_column_schema_to_ruby_default_fallback?
        default = default.to_s
        def default.inspect
          "#{super}.lit"  # core_sql use
        end
        default
      end
    end

    # Convert the given name and parsed database schema into an array containing a
    # method name and arguments to pass to a Schema::Generator to recreate the column.
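    #
    # For example, a plain nullable integer column :num with no default and no
    # foreign key maps to [:column, :num, Integer, {}] when :same_db is not set.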
    def column_schema_to_generator_opts(name, schema, options)
      if options[:single_pk] && schema_autoincrementing_primary_key?(schema)
        type_hash = options[:same_db] ? {:type=>schema[:db_type]} : column_schema_to_ruby_type(schema)
        [:table, :key, :on_delete, :on_update, :deferrable].each{|f| type_hash[f] = schema[f] if schema[f]}
        if type_hash == {:type=>Integer} || type_hash == {:type=>"integer"}
          [:primary_key, name]
        else
          [:primary_key, name, type_hash]
        end
      else
        col_opts = options[:same_db] ? {:type=>schema[:db_type]} : column_schema_to_ruby_type(schema)
        type = col_opts.delete(:type)
        col_opts.delete(:size) if col_opts[:size].nil?
        col_opts[:default] = if schema[:ruby_default].nil?
          column_schema_to_ruby_default_fallback(schema[:default], options)
        else
          schema[:ruby_default]
        end
        col_opts.delete(:default) if col_opts[:default].nil?
        col_opts[:null] = false if schema[:allow_null] == false
        if table = schema[:table]
          [:key, :on_delete, :on_update, :deferrable].each{|f| col_opts[f] = schema[f] if schema[f]}
          [:foreign_key, name, table, col_opts]
        else
          [:column, name, type, col_opts]
        end
      end
    end

    # Convert the column schema information to a hash of column options, one of which must
    # be :type.  The other options added should modify that type (e.g. :size).  If a
    # database type is not recognized, return it as a String type.
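    #
    # Example mappings (derived from the cases below):
    #
    #   'varchar(255)'  # => {:type=>String, :size=>255}
    #   'numeric(10,2)' # => {:type=>BigDecimal, :size=>[10, 2]}
    #   'tinyint(1)'    # => {:type=>TrueClass} if the parsed schema type is :boolean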
    def column_schema_to_ruby_type(schema)
      case t = schema[:db_type].downcase
      when /\A(medium|small)?int(?:eger)?(?:\((\d+)\))?( unsigned)?\z/o
        if !$1 && $2 && $2.to_i >= 10 && $3
          # An unsigned integer type with 10 or more digits can hold values that
          # don't fit in a signed integer type, so use a bigint type in the target database.
          {:type=>Bignum}
        else
          {:type=>Integer}
        end
      when /\Atinyint(?:\((\d+)\))?(?: unsigned)?\z/o
        {:type=>schema[:type] == :boolean ? TrueClass : Integer}
      when /\Abigint(?:\((?:\d+)\))?(?: unsigned)?\z/o
        {:type=>Bignum}
      when /\A(?:real|float|double(?: precision)?)\z/o
        {:type=>Float}
      when 'boolean'
        {:type=>TrueClass}
      when /\A(?:(?:tiny|medium|long|n)?text|clob)\z/o
        {:type=>String, :text=>true}
      when 'date'
        {:type=>Date}
      when /\A(?:small)?datetime\z/o
        {:type=>DateTime}
      when /\Atimestamp(?:\((\d+)\))?(?: with(?:out)? time zone)?\z/o
        {:type=>DateTime, :size=>($1.to_i if $1)}
      when /\Atime(?: with(?:out)? time zone)?\z/o
        {:type=>Time, :only_time=>true}
      when /\An?char(?:acter)?(?:\((\d+)\))?\z/o
        {:type=>String, :size=>($1.to_i if $1), :fixed=>true}
      when /\A(?:n?varchar|character varying|bpchar|string)(?:\((\d+)\))?\z/o
        {:type=>String, :size=>($1.to_i if $1)}
      when /\A(?:small)?money\z/o
        {:type=>BigDecimal, :size=>[19,2]}
      when /\A(?:decimal|numeric|number)(?:\((\d+)(?:,\s*(\d+))?\))?\z/o
        s = [($1.to_i if $1), ($2.to_i if $2)].compact
        {:type=>BigDecimal, :size=>(s.empty? ? nil : s)}
      when /\A(?:bytea|(?:tiny|medium|long)?blob|(?:var)?binary)(?:\((\d+)\))?\z/o
        {:type=>File, :size=>($1.to_i if $1)}
      when /\A(?:year|(?:int )?identity)\z/o
        {:type=>Integer}
      else
        {:type=>String}
      end
    end

    # For the table and foreign key metadata array, return an alter_table
    # string that would add the foreign keys if run in a migration.
    def dump_add_fk_constraints(table, fks)
      sfks = "alter_table(#{table.inspect}) do\n"
      sfks << Schema::Generator.new(self) do
        fks.sort_by{|fk| fk[:columns].map{|c| c.to_s}}.each do |fk|
          foreign_key fk[:columns], fk
        end
      end.dump_constraints.gsub(/^foreign_key /, '  add_foreign_key ')
      sfks << "\nend"
    end

    # For the table given, get the list of foreign keys and return an alter_table
    # string that would add the foreign keys if run in a migration.
    def dump_table_foreign_keys(table, options={})
      begin
        fks = foreign_key_list(table, options).sort_by{|fk| fk[:columns].map{|c| c.to_s}}
      rescue Sequel::NotImplemented
        return ''
      end

      if fks.empty?
        ''
      else
        dump_add_fk_constraints(table, fks)
      end
    end

    # Return a Schema::Generator object that will recreate the
    # table's schema.  Takes the same options as dump_schema_migration.
    def dump_table_generator(table, options={})
      table = table.value.to_s if table.is_a?(SQL::Identifier)
      raise(Error, "must provide table as a Symbol, String, or Sequel::SQL::Identifier") unless [String, Symbol].any?{|c| table.is_a?(c)}
      s = schema(table).dup
      pks = s.find_all{|x| x.last[:primary_key] == true}.map{|x| x.first}
      options = options.merge(:single_pk=>true) if pks.length == 1
      m = method(:column_schema_to_generator_opts)
      im = method(:index_to_generator_opts)

      if options[:indexes] != false
        begin
          indexes = indexes(table).sort_by{|k,v| k.to_s}
        rescue Sequel::NotImplemented
          nil
        end
      end

      if options[:foreign_keys] != false
        begin
          fk_list = foreign_key_list(table)
          
          if (sfk = options[:skipped_foreign_keys]) && (sfkt = sfk[table])
            fk_list.delete_if{|fk| sfkt.has_key?(fk[:columns])}
          end

          composite_fks, single_fks = fk_list.partition{|h| h[:columns].length > 1}
          fk_hash = {}

          single_fks.each do |fk|
            column = fk.delete(:columns).first
            fk.delete(:name)
            fk_hash[column] = fk
          end

          s = s.map do |name, info|
            if fk_info = fk_hash[name]
              [name, fk_info.merge(info)]
            else
              [name, info]
            end
          end
        rescue Sequel::NotImplemented
          nil
        end
      end

      Schema::Generator.new(self) do
        s.each{|name, info| send(*m.call(name, info, options))}
        primary_key(pks) if !@primary_key && pks.length > 0
        indexes.each{|iname, iopts| send(:index, iopts[:columns], im.call(table, iname, iopts, options))} if indexes
        composite_fks.each{|fk| send(:foreign_key, fk[:columns], fk)} if composite_fks
      end
    end

    # Return a string containing add_index/drop_index method calls for
    # creating the index migration.
    def dump_table_indexes(table, meth, options={})
      begin
        indexes = indexes(table).sort_by{|k,v| k.to_s}
      rescue Sequel::NotImplemented
        return ''
      end
      im = method(:index_to_generator_opts)
      gen = Schema::Generator.new(self) do
        indexes.each{|iname, iopts| send(:index, iopts[:columns], im.call(table, iname, iopts, options))}
      end
      gen.dump_indexes(meth=>table, :ignore_errors=>!options[:same_db])
    end

    # Convert the parsed index information into options to the Generator's index method.
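    #
    # For example, a unique index on [:name] whose name matches the default index
    # name is returned as {:unique=>true}, with no :name entry.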
    def index_to_generator_opts(table, name, index_opts, options={})
      h = {}
      if options[:index_names] != false && default_index_name(table, index_opts[:columns]) != name.to_s
        if options[:index_names] == :namespace && !global_index_namespace?
          h[:name] = "#{table}_#{name}".to_sym
        else
          h[:name] = name
        end
      end
      h[:unique] = true if index_opts[:unique]
      h
    end

    # Sort the tables so that referenced tables are created before tables that
    # reference them, and then by name.  If foreign keys are disabled, just sort by name.
    def sort_dumped_tables(tables, options={})
      sort_topologically = if options[:foreign_keys] != false
        begin
          foreign_key_list(:some_table_that_does_not_exist)
          true
        rescue Sequel::NotImplemented
          false
        rescue
          true
        end
      end

      if sort_topologically
        table_fks = {}
        tables.each{|t| table_fks[t] = foreign_key_list(t)}
        # Remove self-referential foreign keys, as they are not important when sorting.
        table_fks.each{|t, fks| fks.delete_if{|fk| fk[:table] == t}}
        tables, skipped_foreign_keys = sort_dumped_tables_topologically(table_fks, [])
        options[:skipped_foreign_keys] = skipped_foreign_keys
        tables
      else
        tables.sort_by{|t| t.to_s}
      end
    end

    # Do a topological sort of tables, so that referenced tables
    # come before referencing tables.  Returns an array of sorted
    # tables and a hash of skipped foreign keys.  The hash will be
    # empty unless there are circular dependencies.
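    #
    # For example, given {:albums=>[fk referencing :artists], :artists=>[]} and an
    # empty starting array, this returns [[:artists, :albums], {}].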
    def sort_dumped_tables_topologically(table_fks, sorted_tables)
      skipped_foreign_keys = {}

      until table_fks.empty? 
        this_loop = []

        table_fks.each do |table, fks|
          fks.delete_if{|fk| !table_fks.has_key?(fk[:table])}
          this_loop << table if fks.empty?
        end

        if this_loop.empty?
          # No tables were changed this round, so there must be a circular dependency.
          # Break circular dependency by picking the table with the least number of
          # outstanding foreign keys and skipping those foreign keys.
          # The skipped foreign keys will be added at the end of the
          # migration.
          skip_table, skip_fks = table_fks.sort_by{|table, fks| [fks.length, table.to_s]}.first
          skip_fks_hash = skipped_foreign_keys[skip_table] = {}
          skip_fks.each{|fk| skip_fks_hash[fk[:columns]] = fk}
          this_loop << skip_table
        end

        # Add sorted tables from this loop to the final list
        sorted_tables.concat(this_loop.sort_by{|t| t.to_s})

        # Remove tables that were handled this loop
        this_loop.each{|t| table_fks.delete(t)}
      end

      [sorted_tables, skipped_foreign_keys]
    end
    
    # Don't use the "...".lit fallback on MySQL, since the defaults it uses aren't
    # valid literal SQL values.
    def use_column_schema_to_ruby_default_fallback?
      database_type != :mysql
    end
  end

  module Schema
    class Generator
      # Dump this generator's columns to a string that could be evaled inside
      # another instance to represent the same columns
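      #
      # For example, a generator built from "primary_key :id" and
      # "String :name, :null=>false" dumps back as:
      #
      #   primary_key :id
      #   String :name, :null=>false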
      def dump_columns
        strings = []
        cols = columns.dup
        cols.each do |x|
          x.delete(:on_delete) if x[:on_delete] == :no_action
          x.delete(:on_update) if x[:on_update] == :no_action
        end
        if pkn = primary_key_name
          cols.delete_if{|x| x[:name] == pkn}
          pk = @primary_key.dup
          pkname = pk.delete(:name)
          @db.serial_primary_key_options.each{|k,v| pk.delete(k) if v == pk[k]}
          strings << "primary_key #{pkname.inspect}#{opts_inspect(pk)}"
        end
        cols.each do |c|
          c = c.dup
          name = c.delete(:name)
          strings << if table = c.delete(:table)
            c.delete(:type) if c[:type] == Integer || c[:type] == 'integer'
            "foreign_key #{name.inspect}, #{table.inspect}#{opts_inspect(c)}"
          else
            type = c.delete(:type)
            opts = opts_inspect(c)
            if type.is_a?(Class)
              "#{type.name} #{name.inspect}#{opts}"
            else
              "column #{name.inspect}, #{type.inspect}#{opts}"
            end
          end
        end
        strings.join("\n")
      end

      # Dump this generator's constraints to a string that could be evaled inside
      # another instance to represent the same constraints
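      #
      # For example, a constraint added as "constraint :min_num, 'num > 0'" dumps
      # back as:
      #
      #   constraint :min_num, "num > 0"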
      def dump_constraints
        cs = constraints.map do |c|
          c = c.dup
          type = c.delete(:type)
          case type
          when :check
            raise(Error, "can't dump check/constraint specified with Proc") if c[:check].is_a?(Proc)
            name = c.delete(:name)
            if !name and c[:check].length == 1 and c[:check].first.is_a?(Hash)
              "check #{c[:check].first.inspect[1...-1]}"
            else
              "#{name ? "constraint #{name.inspect}," : 'check'} #{c[:check].map{|x| x.inspect}.join(', ')}"
            end
          when :foreign_key
            c.delete(:on_delete) if c[:on_delete] == :no_action
            c.delete(:on_update) if c[:on_update] == :no_action
            c.delete(:deferrable) unless c[:deferrable]
            cols = c.delete(:columns)
            table = c.delete(:table)
            "#{type} #{cols.inspect}, #{table.inspect}#{opts_inspect(c)}"
          else
            cols = c.delete(:columns)
            "#{type} #{cols.inspect}#{opts_inspect(c)}"
          end
        end
        cs.join("\n")
      end

      # Dump this generator's indexes to a string that could be evaled inside
      # another instance to represent the same indexes. Options:
      # * :add_index - Use add_index instead of index, so the methods
      #   can be called outside of a generator but inside a migration.
      #   The value of this option should be the table name to use.
      # * :drop_index - Same as add_index, but create drop_index statements.
      # * :ignore_errors - Add the ignore_errors option to the dumped indexes
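      #
      # For example, an "index [:name], :unique=>true" entry dumps as
      # "index [:name], :unique=>true", or, with :add_index=>:items, as
      # "add_index :items, [:name], :unique=>true".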
      def dump_indexes(options={})
        is = indexes.map do |c|
          c = c.dup
          cols = c.delete(:columns)
          if table = options[:add_index] || options[:drop_index]
            "#{options[:drop_index] ? 'drop' : 'add'}_index #{table.inspect}, #{cols.inspect}#{', :ignore_errors=>true' if options[:ignore_errors]}#{opts_inspect(c)}"
          else
            "index #{cols.inspect}#{opts_inspect(c)}"
          end
        end
        is = is.reverse if options[:drop_index]
        is.join("\n")
      end

      private

      # Return a string that converts the given options into one
      # suitable for literal ruby code, handling default values
      # that don't default to a literal interpretation.
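      #
      # For example, opts_inspect(:default=>Date.new(2012, 1, 1), :null=>false)
      # returns ", :default=>Date.parse(\"2012-01-01\"), :null=>false".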
      def opts_inspect(opts)
        if opts[:default]
          opts = opts.dup
          de = case d = opts.delete(:default)
          when BigDecimal, Sequel::SQL::Blob
            "#{d.class.name}.new(#{d.to_s.inspect})"
          when DateTime, Date
            "#{d.class.name}.parse(#{d.to_s.inspect})"
          when Time
            "#{d.class.name}.parse(#{d.strftime('%H:%M:%S').inspect})"
          else
            d.inspect
          end
          ", :default=>#{de}#{", #{opts.inspect[1...-1]}" if opts.length > 0}"
        else
          ", #{opts.inspect[1...-1]}" if opts.length > 0
        end
      end
    end
  end
end