|
|
|
@@ -15,306 +15,193 @@ class AwesomeLegacyJournalsMigration < ActiveRecord::Migration
|
|
|
|
class IncompleteJournalsError < ::StandardError |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
class LegacyJournalMigrator |
|
|
|
|
attr_accessor :table_name |
|
|
|
|
|
|
|
|
|
def initialize(table_name) |
|
|
|
|
self.table_name = table_name |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def column_names |
|
|
|
|
@column_names ||= ActiveRecord::Base.connection.columns(table_name).map(&:name) |
|
|
|
|
end |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
class PreviousState < Struct.new(:journal, :journaled_id, :type) |
|
|
|
|
def set(journal, journaled_id, type) |
|
|
|
|
self.journal = journal |
|
|
|
|
self.journaled_id = journaled_id |
|
|
|
|
self.type = type |
|
|
|
|
end |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def up |
|
|
|
|
check_assumptions |
|
|
|
|
|
|
|
|
|
previous = PreviousState.new(0, "", {}) |
|
|
|
|
legacy_journals = fetch_legacy_journals |
|
|
|
|
|
|
|
|
|
journal_classes = { |
|
|
|
|
"AttachmentJournal" => LegacyJournalMigrator.new("attachment_journals"), |
|
|
|
|
"ChangesetJournal" => LegacyJournalMigrator.new("changeset_journals"), |
|
|
|
|
"NewsJournal" => LegacyJournalMigrator.new("news_journals"), |
|
|
|
|
"MessageJournal" => LegacyJournalMigrator.new("message_journals"), |
|
|
|
|
"WorkPackageJournal" => LegacyJournalMigrator.new("work_package_journals"), |
|
|
|
|
"TimeEntryJournal" => LegacyJournalMigrator.new("time_entry_journals"), |
|
|
|
|
"WikiContentJournal" => LegacyJournalMigrator.new("wiki_content_journals") |
|
|
|
|
} |
|
|
|
|
puts "Migrating #{legacy_journals.count} legacy journals." |
|
|
|
|
|
|
|
|
|
fetch_legacy_journals.each do |legacy_journal| |
|
|
|
|
legacy_journals.each_with_index do |legacy_journal, count| |
|
|
|
|
|
|
|
|
|
# turn id fields into integers. |
|
|
|
|
["id", "journaled_id", "user_id", "version"].each do |f| |
|
|
|
|
legacy_journal[f] = legacy_journal[f].to_i |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
legacy_journal["changed_data"] = YAML.load(legacy_journal["changed_data"]) |
|
|
|
|
type = legacy_journal["type"] |
|
|
|
|
|
|
|
|
|
journaled_id, type, version = legacy_journal["journaled_id"], legacy_journal["type"], legacy_journal["version"] |
|
|
|
|
migrator = get_migrator(type) |
|
|
|
|
|
|
|
|
|
journal_class = journal_classes[type] |
|
|
|
|
if migrator.nil? |
|
|
|
|
ignored[type] += 1 |
|
|
|
|
|
|
|
|
|
if journal_class.nil? |
|
|
|
|
puts "Ignoring type `#{type}`" |
|
|
|
|
next |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
table = journal_class.table_name |
|
|
|
|
|
|
|
|
|
# actually insert/update stuff in the database. |
|
|
|
|
journal = get_journal(journaled_id, type, version) |
|
|
|
|
journal_id = journal["id"] |
|
|
|
|
|
|
|
|
|
# compute the combined journal from current and all previous changesets. |
|
|
|
|
combined_journal = legacy_journal["changed_data"] |
|
|
|
|
if previous.journaled_id == journaled_id && previous.type == type |
|
|
|
|
combined_journal = previous.journal.merge(combined_journal) |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
# remember the combined journal as the previous one for the next iteration. |
|
|
|
|
previous.set(combined_journal, journaled_id, type) |
|
|
|
|
|
|
|
|
|
data = fetch_journal_data(journal_id, table) |
|
|
|
|
migrator.migrate(legacy_journal) |
|
|
|
|
|
|
|
|
|
to_insert = combined_journal.inject({}) do |mem, (key, value)| |
|
|
|
|
if journal_class.column_names.include?(key) |
|
|
|
|
# The old journal's values attribute was structured like |
|
|
|
|
# [old_value, new_value] |
|
|
|
|
# We only need the new_value |
|
|
|
|
mem[key] = value.last |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
mem |
|
|
|
|
if count > 0 && (count % 1000 == 0) |
|
|
|
|
puts "#{count} journals migrated" |
|
|
|
|
end |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
keys = to_insert.keys |
|
|
|
|
values = to_insert.values |
|
|
|
|
|
|
|
|
|
migrate_key_value_pairs!(keys, values, table, legacy_journal, journal_id) |
|
|
|
|
|
|
|
|
|
if data.size > 1 |
|
|
|
|
|
|
|
|
|
raise AmbiguousJournalsError, <<-MESSAGE.split("\n").map(&:strip!).join(" ") + "\n" |
|
|
|
|
It appears there are ambiguous journal data. Please make sure |
|
|
|
|
journal data are consistent and that the unique constraint on |
|
|
|
|
journal_id is met. |
|
|
|
|
MESSAGE |
|
|
|
|
|
|
|
|
|
elsif data.size == 0 |
|
|
|
|
execute <<-SQL |
|
|
|
|
INSERT INTO #{quoted_table_name(table)} (journal_id#{", " + keys.collect{|k| map_key(k) }.join(", ") unless keys.empty? }) |
|
|
|
|
VALUES (#{quote_value(journal_id)}#{", " + values.map{|d| quote_value(d)}.join(", ") unless values.empty?}); |
|
|
|
|
SQL |
|
|
|
|
|
|
|
|
|
data = fetch_journal_data(journal_id, table) |
|
|
|
|
end |
|
|
|
|
ignored.each do |type, amount| |
|
|
|
|
puts "#{type} was ignored #{amount} times" |
|
|
|
|
end |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
data = data.first |
|
|
|
|
def down |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
sql_statements = <<-SQL |
|
|
|
|
UPDATE journals |
|
|
|
|
SET journable_data_id = #{quote_value(journal_id)}, |
|
|
|
|
journable_data_type = #{quote_value(type)}, |
|
|
|
|
user_id = #{quote_value(legacy_journal["user_id"])}, |
|
|
|
|
notes = #{quote_value(legacy_journal["notes"])}, |
|
|
|
|
created_at = #{quote_value(legacy_journal["created_at"])}, |
|
|
|
|
activity_type = #{quote_value(legacy_journal["activity_type"])} |
|
|
|
|
WHERE id = #{quote_value(journal_id)}; |
|
|
|
|
SQL |
|
|
|
|
private |
|
|
|
|
|
|
|
|
|
sql_statements = <<-SQL + sql_statements unless keys.empty? |
|
|
|
|
UPDATE #{quoted_table_name(table)} |
|
|
|
|
SET #{(keys.each_with_index.map {|k,i| "#{map_key(k)} = #{quote_value(values[i])}"}).join(", ")} |
|
|
|
|
WHERE id = #{data["id"]}; |
|
|
|
|
SQL |
|
|
|
|
|
|
|
|
|
execute sql_statements |
|
|
|
|
def ignored |
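    # counts legacy journals that were skipped, per type, for the summary
    # printed at the end of #up.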
|
|
|
|
@ignored ||= Hash.new do |k, v| |
|
|
|
|
0 |
|
|
|
|
end |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def get_migrator(type) |
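    # lazily builds the type => migrator lookup; unknown journal types get
    # nil back and are counted in `ignored` by #up.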
|
|
|
|
@migrators ||= begin |
|
|
|
|
|
|
|
|
|
{ |
|
|
|
|
"AttachmentJournal" => attachment_migrator, |
|
|
|
|
"ChangesetJournal" => changesets_migrator, |
|
|
|
|
"NewsJournal" => news_migrator, |
|
|
|
|
"MessageJournal" => message_migrator, |
|
|
|
|
"WorkPackageJournal" => work_package_migrator, |
|
|
|
|
"IssueJournal" => work_package_migrator, |
|
|
|
|
"Timelines_PlanningElementJournal" => work_package_migrator, |
|
|
|
|
"TimeEntryJournal" => time_entry_migrator, |
|
|
|
|
"WikiContentJournal" => wiki_content_migrator |
|
|
|
|
} |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
@migrators[type] |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def down |
|
|
|
|
def attachment_migrator |
|
|
|
|
LegacyJournalMigrator.new("AttachmentJournal", "attachment_journals") |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
private |
|
|
|
|
|
|
|
|
|
def map_key(key) |
|
|
|
|
case key |
|
|
|
|
when "issue_id" |
|
|
|
|
"work_package_id" |
|
|
|
|
else |
|
|
|
|
key |
|
|
|
|
end |
|
|
|
|
def changesets_migrator |
|
|
|
|
LegacyJournalMigrator.new("ChangesetJournal", "changeset_journals") |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def migrate_key_value_pairs!(keys, values, table, legacy_journal, journal_id) |
|
|
|
|
migrate_key_value_pairs_for_wiki_contents!(keys, values, table, legacy_journal, journal_id) |
|
|
|
|
migrate_key_value_pairs_for_work_packages!(keys, values, table, legacy_journal, journal_id) |
|
|
|
|
def news_migrator |
|
|
|
|
LegacyJournalMigrator.new("NewsJournal", "news_journals") |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def migrate_key_value_pairs_for_work_packages!(keys, values, table, legacy_journal, journal_id) |
|
|
|
|
def message_migrator |
|
|
|
|
LegacyJournalMigrator.new("MessageJournal", "message_journals") |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
if table == "work_package_journals" |
|
|
|
|
def work_package_migrator |
|
|
|
|
LegacyJournalMigrator.new "WorkPackageJournal", "work_package_journals" do |
|
|
|
|
def migrate_key_value_pairs!(keys, values, legacy_journal, journal_id) |
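        # attachment and custom value changes are not columns of
        # work_package_journals; move them into attachable_journals and
        # customizable_journals instead and drop them from keys/values.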
|
|
|
|
attachments = keys.select { |d| d =~ /attachments_.*/ } |
|
|
|
|
attachments.each do |k| |
|
|
|
|
|
|
|
|
|
attachments = keys.select { |d| d =~ /attachments_.*/ } |
|
|
|
|
attachments.each do |k| |
|
|
|
|
attachment_id = k.split("_").last.to_i |
|
|
|
|
|
|
|
|
|
attachment_id = k.split("_").last.to_i |
|
|
|
|
attachable = ActiveRecord::Base.connection.select_all <<-SQL |
|
|
|
|
SELECT * |
|
|
|
|
FROM #{attachable_table_name} AS a |
|
|
|
|
WHERE a.journal_id = #{quote_value(journal_id)} AND a.attachment_id = #{attachment_id}; |
|
|
|
|
SQL |
|
|
|
|
|
|
|
|
|
attachable = ActiveRecord::Base.connection.select_all <<-SQL |
|
|
|
|
SELECT * |
|
|
|
|
FROM #{quoted_table_name("attachable_journals")} AS a |
|
|
|
|
WHERE a.journal_id = #{quote_value(journal_id)} AND a.attachment_id = #{attachment_id}; |
|
|
|
|
SQL |
|
|
|
|
if attachable.size > 1 |
|
|
|
|
|
|
|
|
|
if attachable.size > 1 |
|
|
|
|
raise AmbiguousAttachableJournalError, <<-MESSAGE.split("\n").map(&:strip!).join(" ") + "\n" |
|
|
|
|
It appears there are ambiguous attachable journal data. |
|
|
|
|
Please make sure attachable journal data are consistent and |
|
|
|
|
that the unique constraint on journal_id and attachment_id |
|
|
|
|
is met. |
|
|
|
|
MESSAGE |
|
|
|
|
|
|
|
|
|
raise AmbiguousAttachableJournalError, <<-MESSAGE.split("\n").map(&:strip!).join(" ") + "\n" |
|
|
|
|
It appears there are ambiguous attachable journal data. |
|
|
|
|
Please make sure attachable journal data are consistent and |
|
|
|
|
that the unique constraint on journal_id and attachment_id |
|
|
|
|
is met. |
|
|
|
|
MESSAGE |
|
|
|
|
elsif attachable.size == 0 |
|
|
|
|
|
|
|
|
|
elsif attachable.size == 0 |
|
|
|
|
db_execute <<-SQL |
|
|
|
|
INSERT INTO #{attachable_table_name}(journal_id, attachment_id) |
|
|
|
|
VALUES (#{quote_value(journal_id)}, #{quote_value(attachment_id)}); |
|
|
|
|
SQL |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
execute <<-SQL |
|
|
|
|
INSERT INTO #{quoted_table_name("attachable_journals")}(journal_id, attachment_id) |
|
|
|
|
VALUES (#{quote_value(journal_id)}, #{quote_value(attachment_id)}); |
|
|
|
|
SQL |
|
|
|
|
j = keys.index(k) |
|
|
|
|
[keys, values].each { |a| a.delete_at(j) } |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
j = keys.index(k) |
|
|
|
|
[keys, values].each { |a| a.delete_at(j) } |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
custom_values = keys.select { |d| d =~ /custom_values.*/ } |
|
|
|
|
custom_values.each do |k| |
|
|
|
|
custom_values = keys.select { |d| d =~ /custom_values.*/ } |
|
|
|
|
custom_values.each do |k| |
|
|
|
|
|
|
|
|
|
custom_field_id = k.split("_values").last.to_i |
|
|
|
|
value = values[keys.index k] |
|
|
|
|
custom_field_id = k.split("_values").last.to_i |
|
|
|
|
value = values[keys.index k] |
|
|
|
|
|
|
|
|
|
customizable = ActiveRecord::Base.connection.select_all <<-SQL |
|
|
|
|
SELECT * |
|
|
|
|
FROM #{quoted_table_name("customizable_journals")} AS a |
|
|
|
|
WHERE a.journal_id = #{quote_value(journal_id)} AND a.custom_field_id = #{custom_field_id}; |
|
|
|
|
SQL |
|
|
|
|
customizable = ActiveRecord::Base.connection.select_all <<-SQL |
|
|
|
|
SELECT * |
|
|
|
|
FROM #{customizable_table_name} AS a |
|
|
|
|
WHERE a.journal_id = #{quote_value(journal_id)} AND a.custom_field_id = #{custom_field_id}; |
|
|
|
|
SQL |
|
|
|
|
|
|
|
|
|
if customizable.size > 1 |
|
|
|
|
if customizable.size > 1 |
|
|
|
|
|
|
|
|
|
raise AmbiguousCustomizableJournalError, <<-MESSAGE.split("\n").map(&:strip!).join(" ") + "\n" |
|
|
|
|
It appears there are ambiguous customizable journal |
|
|
|
|
data. Please make sure customizable journal data are |
|
|
|
|
consistent and that the unique constraint on journal_id and |
|
|
|
|
custom_field_id is met. |
|
|
|
|
MESSAGE |
|
|
|
|
raise AmbiguousCustomizableJournalError, <<-MESSAGE.split("\n").map(&:strip!).join(" ") + "\n" |
|
|
|
|
It appears there are ambiguous customizable journal |
|
|
|
|
data. Please make sure customizable journal data are |
|
|
|
|
consistent and that the unique constraint on journal_id and |
|
|
|
|
custom_field_id is met. |
|
|
|
|
MESSAGE |
|
|
|
|
|
|
|
|
|
elsif customizable.size == 0 |
|
|
|
|
elsif customizable.size == 0 |
|
|
|
|
|
|
|
|
|
execute <<-SQL |
|
|
|
|
INSERT INTO #{quoted_table_name("customizable_journals")}(journal_id, custom_field_id, value) |
|
|
|
|
VALUES (#{quote_value(journal_id)}, #{quote_value(custom_field_id)}, #{quote_value(value)}); |
|
|
|
|
SQL |
|
|
|
|
end |
|
|
|
|
db_execute <<-SQL |
|
|
|
|
INSERT INTO #{customizable_table_name}(journal_id, custom_field_id, value) |
|
|
|
|
VALUES (#{quote_value(journal_id)}, #{quote_value(custom_field_id)}, #{quote_value(value)}); |
|
|
|
|
SQL |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
j = keys.index(k) |
|
|
|
|
[keys, values].each { |a| a.delete_at(j) } |
|
|
|
|
j = keys.index(k) |
|
|
|
|
[keys, values].each { |a| a.delete_at(j) } |
|
|
|
|
|
|
|
|
|
end |
|
|
|
|
end |
|
|
|
|
end |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
# custom logic for changes to wiki contents.
|
|
|
|
def migrate_key_value_pairs_for_wiki_contents!(keys, values, table, legacy_journal, journal_id) |
|
|
|
|
|
|
|
|
|
if table == "wiki_content_journals" |
|
|
|
|
|
|
|
|
|
if keys.index("lock_version").nil? |
|
|
|
|
keys.push "lock_version" |
|
|
|
|
values.push legacy_journal["version"] |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
if !(data_index = keys.index("data")).nil? |
|
|
|
|
|
|
|
|
|
compression_index = keys.index("compression") |
|
|
|
|
compression = values[compression_index] |
|
|
|
|
def time_entry_migrator |
|
|
|
|
LegacyJournalMigrator.new("TimeEntryJournal", "time_entry_journals") |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
if !compression.empty? |
|
|
|
|
def wiki_content_migrator |
|
|
|
|
|
|
|
|
|
raise UnsupportedWikiContentJournalCompressionError, <<-MESSAGE.split("\n").map(&:strip!).join(" ") + "\n" |
|
|
|
|
There is a WikiContent journal that contains data in an |
|
|
|
|
unsupported compression: #{compression} |
|
|
|
|
MESSAGE |
|
|
|
|
LegacyJournalMigrator.new("WikiContentJournal", "wiki_content_journals") do |
|
|
|
|
|
|
|
|
|
def migrate_key_value_pairs!(keys, values, legacy_journal, journal_id) |
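        # map the legacy columns: the journal version doubles as lock_version
        # and "data" becomes "text"; compressed wiki content is not supported
        # and raises UnsupportedWikiContentJournalCompressionError.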
|
|
|
|
if keys.index("lock_version").nil? |
|
|
|
|
keys.push "lock_version" |
|
|
|
|
values.push legacy_journal["version"] |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
keys[data_index] = "text" |
|
|
|
|
if !(data_index = keys.index("data")).nil? |
|
|
|
|
|
|
|
|
|
keys.delete_at(compression_index) |
|
|
|
|
values.delete_at(compression_index) |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
end |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
# fetches specific journal data row. might be empty. |
|
|
|
|
def fetch_journal_data(journal_id, table) |
|
|
|
|
ActiveRecord::Base.connection.select_all <<-SQL |
|
|
|
|
SELECT * |
|
|
|
|
FROM #{quoted_table_name(table)} AS d |
|
|
|
|
WHERE d.journal_id = #{quote_value(journal_id)}; |
|
|
|
|
SQL |
|
|
|
|
end |
|
|
|
|
compression_index = keys.index("compression") |
|
|
|
|
compression = values[compression_index] |
|
|
|
|
|
|
|
|
|
# gets a journal row, and makes sure it has a valid id in the database. |
|
|
|
|
def get_journal(id, type, version) |
|
|
|
|
journal = fetch_journal(id, type, version) |
|
|
|
|
if !compression.empty? |
|
|
|
|
|
|
|
|
|
if journal.size > 1 |
|
|
|
|
raise UnsupportedWikiContentJournalCompressionError, <<-MESSAGE.split("\n").map(&:strip!).join(" ") + "\n" |
|
|
|
|
There is a WikiContent journal that contains data in an |
|
|
|
|
unsupported compression: #{compression} |
|
|
|
|
MESSAGE |
|
|
|
|
|
|
|
|
|
raise AmbiguousJournalsError, <<-MESSAGE.split("\n").map(&:strip!).join(" ") + "\n" |
|
|
|
|
It appears there are ambiguous journals. Please make sure |
|
|
|
|
journals are consistent and that the unique constraint on id, |
|
|
|
|
type and version is met. |
|
|
|
|
MESSAGE |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
elsif journal.size == 0 |
|
|
|
|
keys[data_index] = "text" |
|
|
|
|
|
|
|
|
|
execute <<-SQL |
|
|
|
|
INSERT INTO #{quoted_journals_table_name}(journable_id, journable_type, version, created_at) |
|
|
|
|
VALUES ( |
|
|
|
|
#{quote_value(id)}, |
|
|
|
|
#{quote_value(type)}, |
|
|
|
|
#{quote_value(version)}, |
|
|
|
|
#{quote_value(Time.now)} |
|
|
|
|
); |
|
|
|
|
SQL |
|
|
|
|
keys.delete_at(compression_index) |
|
|
|
|
values.delete_at(compression_index) |
|
|
|
|
end |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
journal = fetch_journal(id, type, version) |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
journal.first |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
# fetches specific journal row. might be empty. |
|
|
|
|
def fetch_journal(id, type, version) |
|
|
|
|
ActiveRecord::Base.connection.select_all <<-SQL |
|
|
|
|
SELECT * |
|
|
|
|
FROM #{quoted_journals_table_name} AS j |
|
|
|
|
WHERE j.journable_id = #{quote_value(id)} |
|
|
|
|
AND j.journable_type = #{quote_value(type)} |
|
|
|
|
AND j.version = #{quote_value(version)}; |
|
|
|
|
SQL |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
# fetches legacy journals. might be empty.
|
|
|
@@ -339,23 +226,10 @@ class AwesomeLegacyJournalsMigration < ActiveRecord::Migration
|
|
|
|
attachments_and_changesets + remainder |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def quote_value name |
|
|
|
|
ActiveRecord::Base.connection.quote name |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def quoted_table_name name |
|
|
|
|
ActiveRecord::Base.connection.quote_table_name name |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def quoted_legacy_journals_table_name |
|
|
|
|
@quoted_legacy_journals_table_name ||= quote_table_name 'legacy_journals' |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def quoted_journals_table_name |
|
|
|
|
@quoted_journals_table_name ||= quote_table_name 'journals' |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def check_assumptions |
|
|
|
|
|
|
|
|
|
# SQL finds all those journals that have more or fewer predecessors than
|
|
|
@@ -399,4 +273,235 @@ class AwesomeLegacyJournalsMigration < ActiveRecord::Migration
|
|
|
|
end |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
module DbWorker |
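  # small wrappers around the ActiveRecord connection (quoting, column lookup,
  # SELECT and raw execution), shared by the migration and the migrators below.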
|
|
|
|
def quote_value(name) |
|
|
|
|
ActiveRecord::Base.connection.quote name |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def quoted_table_name(name) |
|
|
|
|
ActiveRecord::Base.connection.quote_table_name name |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def db_columns(table_name) |
|
|
|
|
ActiveRecord::Base.connection.columns table_name |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def db_select_all(statement) |
|
|
|
|
ActiveRecord::Base.connection.select_all statement |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def db_execute(statement) |
|
|
|
|
ActiveRecord::Base.connection.execute statement |
|
|
|
|
end |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
include DbWorker |
|
|
|
|
|
|
|
|
|
class LegacyJournalMigrator |
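  # migrates all legacy journals of a single journal type into the new
  # journals table plus its type-specific data table (e.g. work_package_journals).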
|
|
|
|
include DbWorker |
|
|
|
|
|
|
|
|
|
attr_accessor :table_name, |
|
|
|
|
:type, |
|
|
|
|
:journable_class |
|
|
|
|
|
|
|
|
|
def initialize(type=nil, table_name=nil, &block) |
|
|
|
|
self.table_name = table_name |
|
|
|
|
self.type = type |
|
|
|
|
|
|
|
|
|
instance_eval &block if block_given? |
|
|
|
|
|
|
|
|
|
if table_name.nil? || type.nil? |
|
|
|
|
raise ArgumentError, <<-MESSAGE.split("\n").map(&:strip!).join(" ") + "\n" |
|
|
|
|
table_name and type have to be provided, either as parameters or set within the block.
|
|
|
|
MESSAGE |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
self.journable_class = self.type.gsub(/Journal$/, "") |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def column_names |
|
|
|
|
@column_names ||= db_columns(table_name).map(&:name) |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def migrate(legacy_journal) |
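    # migrate a single legacy journal row: normalize its fields, make sure a
    # matching row exists in the new journals table, then insert/update the
    # type-specific data row and the journals row itself.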
|
|
|
|
|
|
|
|
|
journaled_id, version = legacy_journal["journaled_id"], legacy_journal["version"] |
|
|
|
|
|
|
|
|
|
# turn id fields into integers. |
|
|
|
|
["id", "journaled_id", "user_id", "version"].each do |f| |
|
|
|
|
legacy_journal[f] = legacy_journal[f].to_i |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
legacy_journal["changed_data"] = YAML.load(legacy_journal["changed_data"]) |
|
|
|
|
|
|
|
|
|
# actually insert/update stuff in the database. |
|
|
|
|
journal = get_journal(journaled_id, version) |
|
|
|
|
journal_id = journal["id"] |
|
|
|
|
|
|
|
|
|
combined_journal = combine_journal(journaled_id, legacy_journal) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
existing_journal = fetch_existing_journal_data(journal_id) |
|
|
|
|
|
|
|
|
|
to_insert = combined_journal.inject({}) do |mem, (key, value)| |
|
|
|
|
if column_names.include?(key) |
|
|
|
|
# The old journal's values attribute was structured like |
|
|
|
|
# [old_value, new_value] |
|
|
|
|
# We only need the new_value |
|
|
|
|
mem[key] = value.last |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
mem |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
keys = to_insert.keys |
|
|
|
|
values = to_insert.values |
|
|
|
|
|
|
|
|
|
migrate_key_value_pairs!(keys, values, legacy_journal, journal_id) |
|
|
|
|
|
|
|
|
|
if existing_journal.size > 1 |
|
|
|
|
|
|
|
|
|
raise AmbiguousJournalsError, <<-MESSAGE.split("\n").map(&:strip!).join(" ") + "\n" |
|
|
|
|
It appears there are ambiguous journal data. Please make sure |
|
|
|
|
journal data are consistent and that the unique constraint on |
|
|
|
|
journal_id is met. |
|
|
|
|
MESSAGE |
|
|
|
|
|
|
|
|
|
elsif existing_journal.size == 0 |
|
|
|
|
db_execute <<-SQL |
|
|
|
|
INSERT INTO #{journal_table_name} (journal_id#{", " + keys.map{|k| map_key(k) }.join(", ") unless keys.empty? }) |
|
|
|
|
VALUES (#{quote_value(journal_id)}#{", " + values.map{|d| quote_value(d)}.join(", ") unless values.empty?}); |
|
|
|
|
SQL |
|
|
|
|
|
|
|
|
|
existing_journal = fetch_existing_journal_data(journal_id) |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
existing_journal = existing_journal.first |
|
|
|
|
|
|
|
|
|
sql_statements = <<-SQL |
|
|
|
|
UPDATE journals |
|
|
|
|
SET journable_data_id = #{quote_value(journal_id)}, |
|
|
|
|
journable_data_type = #{quote_value(type)}, |
|
|
|
|
user_id = #{quote_value(legacy_journal["user_id"])}, |
|
|
|
|
notes = #{quote_value(legacy_journal["notes"])}, |
|
|
|
|
created_at = #{quote_value(legacy_journal["created_at"])}, |
|
|
|
|
activity_type = #{quote_value(legacy_journal["activity_type"])} |
|
|
|
|
WHERE id = #{quote_value(journal_id)}; |
|
|
|
|
SQL |
|
|
|
|
|
|
|
|
|
sql_statements = <<-SQL + sql_statements unless keys.empty? |
|
|
|
|
UPDATE #{journal_table_name} |
|
|
|
|
SET #{(keys.each_with_index.map {|k,i| "#{map_key(k)} = #{quote_value(values[i])}"}).join(", ")} |
|
|
|
|
WHERE id = #{existing_journal["id"]}; |
|
|
|
|
SQL |
|
|
|
|
|
|
|
|
|
db_execute sql_statements |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
protected |
|
|
|
|
|
|
|
|
|
def combine_journal(journaled_id, legacy_journal) |
|
|
|
|
# compute the combined journal from current and all previous changesets. |
|
|
|
|
combined_journal = legacy_journal["changed_data"] |
|
|
|
|
if previous.journaled_id == journaled_id |
|
|
|
|
combined_journal = previous.journal.merge(combined_journal) |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
# remember the combined journal as the previous one for the next iteration. |
|
|
|
|
previous.set(combined_journal, journaled_id, type) |
|
|
|
|
|
|
|
|
|
combined_journal |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def previous |
|
|
|
|
@previous ||= PreviousState.new({}, 0, "") |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
# no-op here; meant to be overridden by individual migrator instances
|
|
|
|
def migrate_key_value_pairs!(keys, values, legacy_journal, journal_id) end |
|
|
|
|
|
|
|
|
|
# fetches specific journal data row. might be empty. |
|
|
|
|
def fetch_existing_journal_data(journal_id) |
|
|
|
|
ActiveRecord::Base.connection.select_all <<-SQL |
|
|
|
|
SELECT * |
|
|
|
|
FROM #{journal_table_name} AS d |
|
|
|
|
WHERE d.journal_id = #{quote_value(journal_id)}; |
|
|
|
|
SQL |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def map_key(key) |
|
|
|
|
case key |
|
|
|
|
when "issue_id" |
|
|
|
|
"work_package_id" |
|
|
|
|
else |
|
|
|
|
key |
|
|
|
|
end |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def customizable_table_name |
|
|
|
|
quoted_table_name("customizable_journals") |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def attachable_table_name |
|
|
|
|
quoted_table_name("attachable_journals") |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def journal_table_name |
|
|
|
|
quoted_table_name(table_name) |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
# gets a journal row, and makes sure it has a valid id in the database. |
|
|
|
|
def get_journal(id, version) |
|
|
|
|
journal = fetch_journal(id, version) |
|
|
|
|
|
|
|
|
|
if journal.size > 1 |
|
|
|
|
|
|
|
|
|
raise AmbiguousJournalsError, <<-MESSAGE.split("\n").map(&:strip!).join(" ") + "\n" |
|
|
|
|
It appears there are ambiguous journals. Please make sure |
|
|
|
|
journals are consistent and that the unique constraint on id, |
|
|
|
|
type and version is met. |
|
|
|
|
MESSAGE |
|
|
|
|
|
|
|
|
|
elsif journal.size == 0 |
|
|
|
|
|
|
|
|
|
db_execute <<-SQL |
|
|
|
|
INSERT INTO #{quoted_journals_table_name}(journable_id, journable_type, version, created_at) |
|
|
|
|
VALUES ( |
|
|
|
|
#{quote_value(id)}, |
|
|
|
|
#{quote_value(journable_class)}, |
|
|
|
|
#{quote_value(version)}, |
|
|
|
|
#{quote_value(Time.now)} |
|
|
|
|
); |
|
|
|
|
SQL |
|
|
|
|
|
|
|
|
|
journal = fetch_journal(id, version) |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
journal.first |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
# fetches specific journal row. might be empty. |
|
|
|
|
def fetch_journal(id, version) |
|
|
|
|
db_select_all <<-SQL |
|
|
|
|
SELECT * |
|
|
|
|
FROM #{quoted_journals_table_name} AS j |
|
|
|
|
WHERE j.journable_id = #{quote_value(id)} |
|
|
|
|
AND j.journable_type = #{quote_value(journable_class)} |
|
|
|
|
AND j.version = #{quote_value(version)}; |
|
|
|
|
SQL |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
def quoted_journals_table_name |
|
|
|
|
@quoted_journals_table_name ||= quoted_table_name 'journals' |
|
|
|
|
end |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
class PreviousState < Struct.new(:journal, :journaled_id, :type) |
|
|
|
|
def set(journal, journaled_id, type) |
|
|
|
|
self.journal = journal |
|
|
|
|
self.journaled_id = journaled_id |
|
|
|
|
self.type = type |
|
|
|
|
end |
|
|
|
|
end |
|
|
|
|
|
|
|
|
|
end |
|
|
|
|