Compare commits

..

101 Commits

Author SHA1 Message Date
6b0b8b8c73 v0.54 Tweak trust levels 2024-08-28 14:05:15 +10:00
3fcba4d050 v0.54 Tweak trust levels 2024-08-27 08:21:12 +10:00
85e01956f9 v0.53 Set to 2 threads for PostgreSQL pool size issue and remonitor. Need to consider memory consumption aspects as well of this after OOM 2024-08-20 11:45:10 +10:00
5735b78dc7 v0.52 Improve handling and look at how we can improve ActiveRecord config for PostgreSQL and avoid insufficient pool size 2024-08-19 22:41:46 +10:00
bcc7519d78 v0.52 Improve handling and look at how we can improve ActiveRecord config for PostgreSQL and avoid insufficient pool size 2024-08-19 22:21:16 +10:00
6360e99a96 v0.52 Improve handling and look at how we can improve ActiveRecord config for PostgreSQL and avoid insufficient pool size 2024-08-19 22:09:38 +10:00
90ee7854b8 v0.52 Improve handling and look at how we can improve ActiveRecord config for PostgreSQL and avoid insufficient pool size 2024-08-19 22:04:37 +10:00
e312a4ed12 v0.52 Improve handling and look at how we can improve ActiveRecord config for PostgreSQL and avoid insufficient pool size 2024-08-19 22:01:47 +10:00
af3c46e1cf v0.52 Improve handling and look at how we can improve ActiveRecord config for PostgreSQL and avoid insufficient pool size 2024-08-19 21:33:37 +10:00
e37ba1c8a9 v0.51 Fix mysql bug 2024-08-19 20:15:35 +10:00
f00fa9d67d v0.51 Fix mysql bug 2024-08-19 20:00:59 +10:00
78749e28fc v0.51 Fix mysql bug 2024-08-19 18:50:48 +10:00
e9750621bc Revert "v0.48.6 40-40-40"
This reverts commit 6b4b728751.
2024-08-19 18:45:05 +10:00
5ef554b4e1 v0.50 Work on MySQL brokenness 2024-08-19 18:37:05 +10:00
6b4b728751 v0.48.6 40-40-40 2024-08-19 13:42:03 +10:00
7fde0fa255 v0.48.6 40-40-40 2024-08-19 13:38:43 +10:00
915961cf51 v0.48.6 40-40-40 2024-08-19 13:33:32 +10:00
9d6e1a0b7c v0.48.6 48-48-48, add handling for deleted-former user as OP poster 2024-08-19 13:26:37 +10:00
a10df09d09 v0.48.5 Batch size 1000, double concurrency numbers 2024-08-19 13:11:16 +10:00
c8203b0410 v0.48.5 Batch size 1000, double concurrency numbers 2024-08-19 12:53:23 +10:00
198401f9f3 v0.48.5 Batch size 1000, double concurrency numbers 2024-08-19 12:50:41 +10:00
fc62e223ab v0.48.5 Batch size 1000, double concurrency numbers 2024-08-19 12:45:01 +10:00
d9ae43b78b v0.48.5 Batch size 1000, double concurrency numbers 2024-08-19 12:35:51 +10:00
74d2ee7382 v0.48.4 Batch size 1000 2024-08-19 12:13:27 +10:00
e0fd933526 v0.48.3 Further attempts to address MariaDB challenges and make things as foolproof as possible. 2024-08-19 12:04:50 +10:00
f3c91267af v0.48.1 Further attempts to address MariaDB challenges and make things as foolproof as possible. 2024-08-18 20:27:36 +10:00
ad7a5f945c v0.48.1 Further attempts to address MariaDB challenges and make things as foolproof as possible. 2024-08-18 20:10:07 +10:00
6cb21584f3 v0.48.1 Further attempts to address MariaDB challenges and make things as foolproof as possible. 2024-08-18 20:02:21 +10:00
87f2733dbd v0.48.1 Further attempts to address MariaDB challenges and make things as foolproof as possible. 2024-08-18 19:36:56 +10:00
395b2e9c49 v0.48.1 Further attempts to address MariaDB challenges and make things as foolproof as possible. 2024-08-18 19:24:33 +10:00
1d442ece31 v0.48.1 Further attempts to address MariaDB challenges and make things as foolproof as possible. 2024-08-18 19:16:03 +10:00
b0f300bf8e v0.48.1 Further attempts to address MariaDB challenges and make things as foolproof as possible. 2024-08-18 18:57:22 +10:00
96c2b27250 v0.48.1 Further attempts to address MariaDB challenges and make things as foolproof as possible. 2024-08-18 18:26:16 +10:00
08f62e4288 v0.48.1 Further attempts to address MariaDB challenges and make things as foolproof as possible. 2024-08-18 17:09:46 +10:00
702d90cd34 v0.48 More attempts to address MariaDB craziness 2024-08-18 16:36:18 +10:00
eb6ec24033 v0.47.1 Fix .id nil bug 2024-08-18 16:35:52 +10:00
8caa2027cb v0.47.1 Fix .id nil bug 2024-08-18 16:16:01 +10:00
2d233fdb42 v0.47.1 Fix .id nil bug 2024-08-18 16:13:05 +10:00
3e608d599b v0.47.1 Fix .id nil bug 2024-08-18 16:08:14 +10:00
468b8b725b v0.47 Significant tweaks and workarounds to try to improve smooth operation. 2024-08-18 15:56:12 +10:00
ddf59a09a5 v0.46 Restore use of ConnectionPool, tweak threads, mysql pool size, timeout, add full stacktraces 2024-08-18 15:06:30 +10:00
18d81d3880 v0.45 SQLite mutexes only wrap around updates now 2024-08-18 05:47:22 +10:00
f7db3e8991 v0.45 SQLite mutexes only wrap around updates now 2024-08-18 05:30:38 +10:00
0372db9cc7 v0.45 SQLite mutexes only wrap around updates now 2024-08-18 05:12:42 +10:00
90a2d2fa37 v0.44 Try removing connection_pool and starting MySQL within thread method and fix handle_post_attachment 2024-08-18 04:39:51 +10:00
798153c8fc v0.43 Move to connection pooling for MySQL problem 2024-08-18 03:35:41 +10:00
698c1b0f26 v0.43 Move to connection pooling for MySQL problem 2024-08-18 03:12:07 +10:00
855a0a9e46 v0.43 Move to connection pooling for MySQL problem 2024-08-18 02:41:56 +10:00
416dc9c1e0 v0.42.1 Rewrite and simplification of concurrent-ruby support 2024-08-18 02:01:31 +10:00
7b3a9babcb v0.42.1 Rewrite and simplification of concurrent-ruby support 2024-08-18 01:44:29 +10:00
23a65c5f5c v0.42.1 Rewrite and simplification of concurrent-ruby support 2024-08-18 01:23:54 +10:00
5c648f97c1 v0.42.1 Rewrite and simplification of concurrent-ruby support 2024-08-18 01:08:13 +10:00
7847784e0e v0.42.1 Rewrite and simplification of concurrent-ruby support 2024-08-18 01:04:30 +10:00
2b95e0a89c v0.42.1 Rewrite and simplification of concurrent-ruby support 2024-08-18 00:17:24 +10:00
d8633b5b6c v0.42.1 Rewrite and simplification of concurrent-ruby support 2024-08-18 00:10:57 +10:00
b7fcb220c9 v0.42.1 Rewrite and simplification of concurrent-ruby support 2024-08-17 23:57:56 +10:00
8e6edb2d3f v0.42.1 Rewrite and simplification of concurrent-ruby support 2024-08-17 23:51:42 +10:00
78797c3ec1 v0.42.1 Rewrite and simplification of concurrent-ruby support 2024-08-17 23:08:35 +10:00
5a0e8a7d2b v0.42.1 Rewrite and simplification of concurrent-ruby support 2024-08-17 23:05:09 +10:00
ef50d344c9 v0.42.1 Rewrite and simplification of concurrent-ruby support 2024-08-17 23:02:02 +10:00
6edd8a985d v0.42.1 Rewrite and simplification of concurrent-ruby support 2024-08-17 22:51:02 +10:00
e217447a48 v0.42.1 Rewrite and simplification of concurrent-ruby support 2024-08-17 22:47:56 +10:00
ccff9b1b64 v0.42.1 Rewrite and simplification of concurrent-ruby support 2024-08-17 22:45:04 +10:00
fed7a8935e v0.42.1 Rewrite and simplification of concurrent-ruby support 2024-08-17 22:29:49 +10:00
678012f76a v0.42.1 Rewrite and simplification of concurrent-ruby support 2024-08-17 22:26:07 +10:00
f90d0fb6ae v0.42.1 Rewrite and simplification of concurrent-ruby support 2024-08-17 22:23:25 +10:00
b44e9baf57 v0.42 Mutex addition for SQLite (which may be very important) 2024-08-17 20:34:47 +10:00
d3e6dac1de v0.42 Mutex addition for SQLite (which may be very important) 2024-08-17 20:25:48 +10:00
3a1476951e v0.41.2 Several bug fixes and improvements for fully concurrent topic-post import 2024-08-17 19:08:38 +10:00
8f80fbd19f v0.41.2 Several bug fixes and improvements for fully concurrent topic-post import 2024-08-17 19:04:57 +10:00
7faa849198 v0.41.2 Several bug fixes and improvements for fully concurrent topic-post import 2024-08-17 18:56:30 +10:00
29f66c1e60 v0.41.2 Several bug fixes and improvements for fully concurrent topic-post import 2024-08-17 18:52:51 +10:00
4f608ce87f v0.41.1 Fixes for FULL concurrency support 2024-08-17 17:09:36 +10:00
8dd8e2f72e v0.41 Further improve FULL concurrency support, for both MySQL/MariaDB _and_ importantly, the PostgreSQL Discourse DB additions and changes with ActiveRecord connection pooling and Mutex 2024-08-17 16:34:46 +10:00
5e2d2e78e5 v0.41 Further improve FULL concurrency support, for both MySQL/MariaDB _and_ importantly, the PostgreSQL Discourse DB additions and changes with ActiveRecord connection pooling and Mutex 2024-08-17 16:16:53 +10:00
2966e1c9f9 v0.41 Further improve FULL concurrency support, for both MySQL/MariaDB _and_ importantly, the PostgreSQL Discourse DB additions and changes with ActiveRecord connection pooling and Mutex 2024-08-17 16:09:57 +10:00
a005cda0ae v0.41 Further improve FULL concurrency support, for both MySQL/MariaDB _and_ importantly, the PostgreSQL Discourse DB additions and changes with ActiveRecord connection pooling and Mutex 2024-08-17 15:55:22 +10:00
168dcc9db7 v0.40 Move to per thread MySQL/MariaDB connection 2024-08-17 04:41:53 +10:00
35b97db341 v0.40 Move to per thread MySQL/MariaDB connection 2024-08-17 04:33:57 +10:00
c11055d0f4 v0.39.7 Fixes/tweaks in calculate_dynamic_pool_size 2024-08-17 03:27:17 +10:00
1f09322207 v0.39.7 Fixes/tweaks in calculate_dynamic_pool_size 2024-08-17 03:09:40 +10:00
524fae9283 v0.39.7 Fixes/tweaks in calculate_dynamic_pool_size 2024-08-17 03:06:07 +10:00
77930217df v0.39.6 Fixes/tweaks in calculate_dynamic_pool_size 2024-08-17 02:56:46 +10:00
e5e6dff339 v0.39.5 Add SQLite table for tracking successful post importation; Split out user import into three separate callable methods; require sys/proctable 2024-08-17 02:47:22 +10:00
061aaba9fe v0.39.4 Add SQLite table for tracking successful post importation; Split out user import into three separate callable methods; require sys/proctable 2024-08-17 02:22:25 +10:00
44d673ffaa v0.39.3 Add SQLite table for tracking successful post importation; Split out user import into three separate callable methods; require sys/proctable 2024-08-17 02:16:15 +10:00
5b6f2c3c0e v0.39.2 Add SQLite table for tracking successful post importation; Split out user import into three separate callable methods; require sys/proctable 2024-08-17 01:57:32 +10:00
ea1dae98d7 v0.39.1 Add SQLite table for tracking successful post importation; Split out user import into three separate callable methods; require sys/proctable 2024-08-17 01:52:45 +10:00
5002b90712 v0.39 Add SQLite table for tracking successful post importation; Split out user import into three separate callable methods; require sys/proctable 2024-08-17 01:06:25 +10:00
7b0a45cb89 v0.38 Attempt to reimplement with mutex support for interrupting and resuming during post import. This will have to be tested / validated 2024-08-16 22:20:52 +10:00
36037ea05a v0.37.1 Huge move to threaded support for topic-post import; disable topic-post import for current user re-import 202408162130, persistence for exported files 2024-08-16 21:34:02 +10:00
84162b1b78 v0.37 Huge move to threaded support for topic-post import; disable topic-post import for current user re-import 202408162130 2024-08-16 21:30:56 +10:00
3d7e5701ef Merge pull request 'ross-improvements' (#1) from ross-improvements into main
Reviewed-on: #1
2024-08-16 08:56:18 +00:00
f3b1f0416d added suggestion for thread count tweaking 2024-08-16 08:56:18 +00:00
16c261bede index out of range issue fixed 2024-08-16 08:56:18 +00:00
b73be6d27f concurrency outline 2024-08-16 08:56:18 +00:00
0ad1482f95 optimized main query for post import 2024-08-16 08:56:18 +00:00
4ad23d19b7 prep for 20240816 import run 2024-08-16 03:17:06 +10:00
cace4e88ee goss-cleanup.rb and goss-delalluser-20240527.rb fixes 2024-08-16 02:53:45 +10:00
e9e5e46b43 goss-cleanup.rb and goss-delalluser-20240527.rb fixes 2024-08-16 02:49:02 +10:00
ab0449e2ef v0.36.8 Move SQLite3 DB to persistent storage /bitnami/discourse/sqlite 2024-08-16 02:41:35 +10:00
5 changed files with 1346 additions and 67 deletions

Binary file not shown.

View File

@@ -1,5 +1,10 @@
# v0.13
require File.expand_path("../../../config/environment", __FILE__)
# Federated Computer, Inc.
# David Sainty <saint@federated.computer> 2024 A.D.
# Gossamer Threads to Discourse -- CleanUp Script
# v0.16 Add parallel deletion of posts.
require 'concurrent-ruby'
require File.expand_path("../../../../config/environment", __FILE__)
class GossamerForumsCleaner
def cleanup_users
@@ -33,10 +38,38 @@ class GossamerForumsCleaner
topic = Topic.find_by(id: field.topic_id)
if topic
puts "Deleting topic #{topic.title} (ID: #{topic.id})"
# topic.posts.each do |post|
# puts "Deleting post #{post.id} in topic #{topic.id}"
# post.destroy
# end
topic.destroy
end
end
end
def cleanup_topics_former_user
puts "Cleaning up imported topics..."
# Find all topics that were imported
TopicCustomField.where(name: 'original_gossamer_id').each do |field|
topic = Topic.find_by(id: field.topic_id)
next unless topic
# Fetch the first post in the topic
first_post = topic.posts.order(:created_at).first
# Check if the first post has user_id 2
if first_post && first_post.user_id == 2
puts "Deleting topic #{topic.title} (ID: #{topic.id})"
# Destroy all posts in the topic
topic.posts.each do |post|
puts "Deleting post #{post.id} in topic #{topic.id}"
post.destroy
end
# Destroy the topic itself
topic.destroy
end
end
@@ -54,6 +87,30 @@ class GossamerForumsCleaner
end
end
def cleanup_posts_parallel
puts "Cleaning up imported posts..."
# Define the number of threads to use
num_threads = 8
pool = Concurrent::FixedThreadPool.new(num_threads)
PostCustomField.where(name: 'original_gossamer_id').in_batches(of: 1000) do |batch|
batch.each do |field|
pool.post do
post = Post.find_by(id: field.post_id)
if post
puts "Deleting post #{post.id} (ID: #{post.id})"
post.destroy
end
end
end
end
# Wait for all threads to complete
pool.shutdown
pool.wait_for_termination
end
def cleanup_messages
puts "Cleaning up imported personal messages..."
# Find all personal messages (inbox) that were imported and delete them
@@ -73,9 +130,10 @@ class GossamerForumsCleaner
def perform_cleanup
puts "Cleanup beginning!"
# cleanup_messages
cleanup_posts
cleanup_topics
cleanup_categories
# cleanup_topics
cleanup_topics_former_user
# cleanup_posts_parallel
# cleanup_categories
# cleanup_users
puts "Cleanup complete!"
end
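The pattern behind cleanup_posts_parallel above is worth isolating: batch the ids, hand each one to a bounded pool, then shut down and wait. A minimal self-contained sketch, assuming concurrent-ruby; the ids, pool size, and batch size here are illustrative, not necessarily the script's values:

require 'concurrent-ruby'

work_ids = (1..100).to_a
pool = Concurrent::FixedThreadPool.new(8)
work_ids.each_slice(10) do |batch|
  batch.each do |id|
    pool.post { puts "processing #{id}" }  # each job runs on a pool thread
  end
end
pool.shutdown             # stop accepting new jobs
pool.wait_for_termination # block until everything queued has finished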

View File

@@ -1,8 +1,8 @@
# Load the Discourse environment
require File.expand_path("../../../config/environment", __FILE__)
require File.expand_path("../../../../config/environment", __FILE__)
# Define usernames to exclude from deletion
excluded_usernames = ["saint", "discobot", "system"]
excluded_usernames = ["saint", "discobot", "system","admin"]
# Find all users except the excluded ones
users_to_delete = User.where.not(username: excluded_usernames)
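Only the exclusion list changes in this hunk; the loop that actually deletes users_to_delete sits outside it. For orientation, a hedged sketch of how such a loop is commonly written against Discourse's UserDestroyer -- the actor and options below are assumptions, not code from this repository:

# Hypothetical deletion loop; not shown in this diff.
actor = Discourse.system_user
users_to_delete.find_each do |user|
  puts "Destroying user #{user.username}"
  UserDestroyer.new(actor).destroy(user, delete_posts: true)
end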

View File

@@ -1,7 +1,7 @@
# Federated Computer, Inc.
# David Sainty <saint@federated.computer> 2024 A.D.
# Gossamer Threads to Discourse -- Migration-Import Script
# v0.36.7 Tweak for re-running as of 20240816
# v0.54 Tweak trust levels
require 'mysql2'
require 'open-uri'
@@ -16,6 +16,11 @@ require 'time'
require 'concurrent'
require 'sys/proctable'
require 'active_record'
require 'connection_pool'
require File.expand_path("../../../../config/environment", __FILE__)
# require_relative '../base'
require File.expand_path("../../../../script/import_scripts/base", __FILE__)
@@ -24,6 +29,7 @@ class GossamerForumsImporter < ImportScripts::Base
def initialize
super
begin
# Initialize MySQL client to connect to Gossamer Forums database
@mysql_client = Mysql2::Client.new(
host: "slowtwitch.northend.network",
@@ -42,19 +48,53 @@ end
end
def initialize_sqlite_id_name_url_db
@db = SQLite3::Database.new 'id_name_url_map.db'
@db = SQLite3::Database.new '/bitnami/discourse/sqlite/id_name_url_map.db'
###### ONLY when we need to clear the url_map and topic_import_status .... e.g. if reimporting topics-posts from scratch
# @db.execute <<-SQL
# DROP TABLE IF EXISTS url_map;
# SQL
# @db.execute <<-SQL
# DROP TABLE IF EXISTS topic_last_post_time;
# SQL
# @db.execute <<-SQL
# DROP TABLE IF EXISTS topic_post_count;
# SQL
# @db.execute <<-SQL
# DROP TABLE IF EXISTS user_topic_count;
# SQL
# @db.execute <<-SQL
# DROP TABLE IF EXISTS user_post_count;
# SQL
# @db.execute <<-SQL
# DROP TABLE IF EXISTS topic_last_post_user;
# SQL
# @db.execute <<-SQL
# DROP TABLE IF EXISTS topic_post_numbers;
# SQL
# @db.execute <<-SQL
# DROP TABLE IF EXISTS highest_processed_post_id;
# SQL
# @db.execute <<-SQL
# DROP TABLE IF EXISTS topic_import_status;
# SQL
#######
# USER IMPORT - map of old to new user ids, used for topic-post import.
@db.execute <<-SQL
CREATE TABLE IF NOT EXISTS user_id_map (
old_user_id INTEGER PRIMARY KEY,
new_user_id INTEGER
);
SQL
# CATEGORY IMPORT - map of old to new category ids, used for topic-post import.
@db.execute <<-SQL
CREATE TABLE IF NOT EXISTS category_id_map (
old_category_id INTEGER PRIMARY KEY,
new_category_id INTEGER
);
SQL
# USER IMPORT - map of old to new usernames for SENDING MIGRATION EMAIL
@db.execute <<-SQL
CREATE TABLE IF NOT EXISTS username_map (
id INTEGER PRIMARY KEY,
@@ -64,12 +104,7 @@ class GossamerForumsImporter < ImportScripts::Base
real_name TEXT
);
SQL
# ONLY when we need to clear the url_map e.g. if reimporting topics-posts from scratch
# @db.execute <<-SQL
# DROP TABLE IF EXISTS url_map;
# SQL
# POST IMPORT - Generate a map of old_post_id (Gossamer) and the new URL for a WEB SERVER REDIRECT FILE
@db.execute <<-SQL
CREATE TABLE IF NOT EXISTS url_map (
old_post_id INTEGER PRIMARY KEY,
@@ -77,55 +112,69 @@ class GossamerForumsImporter < ImportScripts::Base
title TEXT
);
SQL
# POST IMPORT - For each topic, the time of the last / most recent post/reply
@db.execute <<-SQL
CREATE TABLE IF NOT EXISTS topic_last_post_time (
topic_id INTEGER PRIMARY KEY,
last_post_time INTEGER
);
SQL
# POST IMPORT - For each topic, increment post_count as we add posts.
@db.execute <<-SQL
CREATE TABLE IF NOT EXISTS topic_post_count (
topic_id INTEGER PRIMARY KEY,
post_count INTEGER DEFAULT 0
);
SQL
# POST IMPORT - For each user (_id), increment topic_count as we add topics (to see total topics per user)
@db.execute <<-SQL
CREATE TABLE IF NOT EXISTS user_topic_count (
user_id INTEGER PRIMARY KEY,
topic_count INTEGER DEFAULT 0
);
SQL
# POST IMPORT - For each user (_id), increment post_count as we add posts (to see total posts per user)
@db.execute <<-SQL
CREATE TABLE IF NOT EXISTS user_post_count (
user_id INTEGER PRIMARY KEY,
post_count INTEGER DEFAULT 0
);
SQL
# POST IMPORT - For each topic, the user_id for the last poster / replier
@db.execute <<-SQL
CREATE TABLE IF NOT EXISTS topic_last_post_user (
topic_id INTEGER PRIMARY KEY,
user_id INTEGER
);
SQL
# POST IMPORT - The number of posts in a given topic, incremented as we add a new reply post to a topic.
@db.execute <<-SQL
CREATE TABLE IF NOT EXISTS topic_post_numbers (
topic_id INTEGER PRIMARY KEY,
post_number INTEGER DEFAULT 0
);
SQL
# POST IMPORT - Record persistent integer value for highest processed post id -- not used
@db.execute <<-SQL
CREATE TABLE IF NOT EXISTS highest_processed_post_id (
id INTEGER PRIMARY KEY CHECK (id = 1),
post_id INTEGER
);
SQL
# PERSONAL MESSAGE IMPORT - Record persistent integer value for highest processed personal id
@db.execute <<-SQL
CREATE TABLE IF NOT EXISTS highest_processed_personal_id (
id INTEGER PRIMARY KEY CHECK (id = 1),
personal_id INTEGER
);
SQL
# POST IMPORT - For each topic (topic post ID) record status 0 fail or 1 success
@db.execute <<-SQL
CREATE TABLE IF NOT EXISTS topic_import_status (
post_id INTEGER PRIMARY KEY,
status INTEGER DEFAULT 0
);
SQL
end
def insert_user_id_mapping(old_user_id, new_user_id)
@@ -145,7 +194,7 @@ class GossamerForumsImporter < ImportScripts::Base
end
def insert_username_mapping(old_username, new_username, email, real_name)
@db.execute "INSERT INTO username_map (old_username, new_username, email, real_name) VALUES (?, ?, ?, ?)", old_username, new_username, email, real_name
@db.execute "INSERT OR REPLACE INTO username_map (old_username, new_username, email, real_name) VALUES (?, ?, ?, ?)", old_username, new_username, email, real_name
end
# Define a method to export the username mapping table to a CSV file
@@ -164,7 +213,7 @@ class GossamerForumsImporter < ImportScripts::Base
# Insert a URL mapping into the SQLite database
def insert_url_mapping(old_post_id, new_url, title)
@db.execute "INSERT INTO url_map (old_post_id, new_url, title) VALUES (?, ?, ?)", [old_post_id, new_url, title]
@db.execute "INSERT OR REPLACE INTO url_map (old_post_id, new_url, title) VALUES (?, ?, ?)", [old_post_id, new_url, title]
end
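# Note: moving these mapping inserts from INSERT to INSERT OR REPLACE makes
# re-runs idempotent -- repeating an old_post_id upserts the existing row
# instead of raising SQLite3::ConstraintException on the PRIMARY KEY.
# Illustrative (hypothetical values):
#   insert_url_mapping(1234, "https://new/t/example/1", "Example")  # first run inserts
#   insert_url_mapping(1234, "https://new/t/example/1", "Example")  # re-run replaces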
# Export the URL mappings to a CSV file
@@ -271,11 +320,32 @@ class GossamerForumsImporter < ImportScripts::Base
@db.execute "INSERT OR REPLACE INTO highest_processed_personal_id (id, personal_id) VALUES (1, ?)", personal_id
end
# Check if post_id exists and its status
def fetch_post_status(post_id)
result = @db.execute("SELECT status FROM topic_import_status WHERE post_id = ?", post_id).flatten.first
result.nil? ? nil : result.to_i
end
# Mark post_id as complete
def mark_post_as_complete(post_id)
@db.execute("INSERT OR REPLACE INTO topic_import_status (post_id, status) VALUES (?, 1)", post_id)
end
# Mark post_id as failed
def mark_post_as_failed(post_id)
@db.execute("INSERT OR REPLACE INTO topic_import_status (post_id, status) VALUES (?, 0)", post_id)
end
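# Together, fetch_post_status / mark_post_as_complete / mark_post_as_failed
# make the topic import resumable. A sketch of the check-import-mark cycle
# they support (post ids hypothetical; the real loop is in
# threaded_topic_import below):
#
#   [101, 102, 103].each do |post_id|
#     next if fetch_post_status(post_id) == 1  # 1 = already imported, skip
#     begin
#       # ... import the topic for post_id ...
#       mark_post_as_complete(post_id)
#     rescue => e
#       mark_post_as_failed(post_id)           # 0 = retried on the next run
#     end
#   end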
# Execute an SQL query on the Gossamer Forums database
def execute_query(query)
@mysql_client.query(query, as: :hash)
end
# Execute an SQL query on the Gossamer Forums database
def execute_query_concurrent(query, mysql_client)
mysql_client.query(query, as: :hash)
end
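# Mysql2::Client instances are not safe to share between threads, which is
# why this variant takes the per-thread client explicitly instead of using
# @mysql_client. A minimal usage sketch (connection details hypothetical):
#
#   threads = 4.times.map do
#     Thread.new do
#       client = Mysql2::Client.new(host: "db.example", username: "u",
#                                   password: "p", database: "forums")
#       begin
#         execute_query_concurrent("SELECT 1", client).to_a
#       ensure
#         client.close  # always release the per-thread connection
#       end
#     end
#   end
#   threads.each(&:join)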
# Sanitize the username to meet Discourse's requirements
def sanitize_username(original_username, email, real_name)
# original_username = username
@@ -436,6 +506,7 @@ class GossamerForumsImporter < ImportScripts::Base
upload
rescue => e
puts "FAILURE: Failed to upload attachment #{filename} for user_id #{user_id}: #{e.message}"
puts e.backtrace.join("\n") # Print the full stack trace
nil
end
end
@@ -451,14 +522,15 @@ class GossamerForumsImporter < ImportScripts::Base
upload
rescue => e
puts "Failed to upload attachment #{filename} for user_id #{user_id}: #{e.message}"
puts e.backtrace.join("\n") # Print the full stack trace
nil
end
end
# Helper method to handle post attachments
def handle_post_attachments(gossamer_post_id, post, user_id)
execute_query("SELECT * FROM gforum_PostAttachment WHERE post_id_fk = #{gossamer_post_id}").each do |att_row|
def handle_post_attachments(gossamer_post_id, post, user_id, mysql_client)
execute_query_concurrent("SELECT * FROM gforum_PostAttachment WHERE post_id_fk = #{gossamer_post_id}", mysql_client).each do |att_row|
attachment_url = "https://forum.slowtwitch.com/forum/?do=post_attachment;postatt_id=#{att_row['postatt_id']}"
puts "Handling attachment: #{attachment_url}"
attachment_data = download_attachment(attachment_url)
@@ -504,6 +576,7 @@ class GossamerForumsImporter < ImportScripts::Base
# file
# rescue => e
# puts "Failed to download file from #{url}: #{e.message}"
# puts e.backtrace.join("\n") # Print the full stack trace
# nil
# end
# end
@@ -547,7 +620,7 @@ class GossamerForumsImporter < ImportScripts::Base
# Import users from Gossamer Forums to Discourse
def import_users
puts "Importing users..."
puts "Importing Users..."
users = []
# Fetch all users from Gossamer Forums
@@ -576,22 +649,88 @@ class GossamerForumsImporter < ImportScripts::Base
# insert_user_id_mapping(user[:id], user.id)
user
end
end
# For each user, add user ID mapping to SQLite now that we know what the Discourse user ID is, ... and append user bio and import user files
# Generate SQLite user ID mapping between Discourse and Gossamer
def generate_user_id_mapping
puts "Generating User ID Mapping..."
users = []
# Fetch all users from Gossamer Forums
execute_query("SELECT * FROM gforum_User").each do |row|
users << {
id: row['user_id'],
username: sanitize_username(row['user_username'], row['user_email'], row['user_real_name']),
email: row['user_email'],
created_at: Time.at(row['user_registered']),
updated_at: Time.at(row['user_last_seen']),
name: row['user_real_name'],
title: row['user_title'],
bio_raw: row['user_about'] || "",
website: row['user_homepage'],
location: row['user_location'],
custom_fields: {
md5_password: row['user_password'],
original_username: row['user_username'],
original_gossamer_id: row['user_id']
}
}
end
# For each user, add user ID mapping to SQLite now that we know what the Discourse user ID is
users.each do |user|
# discourse_username = sanitize_username(user[:username], user[:email], user[:name])
discourse_username = user[:username]
discourse_user = User.find_by(username: discourse_username)
if discourse_user.nil?
puts "User #{user[:username]} --> #{discourse_username} not found in Discourse. Skipping file import."
puts "User #{user[:username]} --> #{discourse_username} not found in Discourse. Skipping user mapping addition."
next
end
# # Store the user ID mapping
# Store the user ID mapping
# @user_id_map[user[:id]] = discourse_user.id
puts "for insert_user_id_mapping: user[:id] #{user[:id]} discourse_user.id #{discourse_user.id}"
insert_user_id_mapping(user[:id], discourse_user.id)
end
end
# Import and set user Bio and Images
def set_user_bio_images
puts "Setting User Bio and Images..."
users = []
# Fetch all users from Gossamer Forums
execute_query("SELECT * FROM gforum_User").each do |row|
users << {
id: row['user_id'],
username: sanitize_username(row['user_username'], row['user_email'], row['user_real_name']),
email: row['user_email'],
created_at: Time.at(row['user_registered']),
updated_at: Time.at(row['user_last_seen']),
name: row['user_real_name'],
title: row['user_title'],
bio_raw: row['user_about'] || "",
website: row['user_homepage'],
location: row['user_location'],
custom_fields: {
md5_password: row['user_password'],
original_username: row['user_username'],
original_gossamer_id: row['user_id']
}
}
end
# For each user, append user bio and import user files
users.each do |user|
# discourse_username = sanitize_username(user[:username], user[:email], user[:name])
discourse_username = user[:username]
discourse_user = User.find_by(username: discourse_username)
if discourse_user.nil?
puts "User #{user[:username]} --> #{discourse_username} not found in Discourse. Skipping bio-image setting."
next
end
# Ensure user profile exists and bio_raw is a string
discourse_user.user_profile ||= UserProfile.new(user_id: discourse_user.id)
@@ -656,6 +795,7 @@ class GossamerForumsImporter < ImportScripts::Base
png_path if File.exist?(png_path)
rescue => e
puts "Failed to convert image #{file_path}: #{e.message}"
puts e.backtrace.join("\n") # Print the full stack trace
nil
end
end
@@ -668,6 +808,7 @@ class GossamerForumsImporter < ImportScripts::Base
resized_path if File.exist?(resized_path)
rescue => e
puts "Failed to resize image #{file_path}: #{e.message}"
puts e.backtrace.join("\n") # Print the full stack trace
nil
end
end
@@ -759,6 +900,7 @@ class GossamerForumsImporter < ImportScripts::Base
FileUtils.copy_file(resized_image_path, resized_temp_file.path)
rescue => e
puts "Skipping image due to resize failure: #{temp_file.path}"
puts e.backtrace.join("\n") # Print the full stack trace
temp_file.close
temp_file.unlink
next
@@ -922,55 +1064,469 @@ class GossamerForumsImporter < ImportScripts::Base
end
# Fetch post views from the gforum_PostView table
def fetch_post_views(post_id)
result = execute_query("SELECT post_views FROM gforum_PostView WHERE post_id_fk = #{post_id} LIMIT 1").first
def fetch_post_views(post_id, mysql_client)
result = execute_query_concurrent("SELECT post_views FROM gforum_PostView WHERE post_id_fk = #{post_id} LIMIT 1", mysql_client).first
result ? result['post_views'] : 0
end
#THREADING OUTLINE HERE --------------------------------------------
########## THREADING START #############################################################################
# Get list of TOPICS / OP posts, i.e. post ids that have no parent / root id - SELECT post_id FROM gforum_Post WHERE post_root_id = 0;
def threaded_topic_import
# Get list of IDs that have no parent ID - SELECT post_id FROM gforum_Post WHERE post_root_id = 0;
parent_post_ids = execute_query("SELECT post_id FROM gforum_Post WHERE post_root_id = 0")
parent_post_count = parent_post_ids.count
batch_size = 100 #set our batch size
current_post_batch = 0 #set our current batch number
is_complete = false
## # Update connection pool settings
## ActiveRecord::Base.establish_connection(
## ActiveRecord::Base.connection_db_config.configuration_hash.merge(pool: 40, timeout: 5000)
## )
### # Define the custom connection pool settings
### custom_pool = ActiveRecord::ConnectionAdapters::ConnectionPool.new(
### ActiveRecord::Base.connection_pool.spec.to_h.merge(pool: 40, timeout: 5000)
### )
### # Register the custom connection pool under a unique identifier
### ActiveRecord::Base.connection_handler.connection_pools['CustomPool'] = custom_pool
# Use CachedThreadPool for dynamic thread management
#### pool = Concurrent::CachedThreadPool.new
###### pool = Concurrent::FixedThreadPool.new(7)
pool = Concurrent::FixedThreadPool.new(2)
# Define the connection pool inside the method
###### mariadb_pool = ConnectionPool.new(size: 14, timeout: 100) do
#### mariadb_pool = ConnectionPool.new(size: 40, timeout: 100) do
#### Mysql2::Client.new(
#### host: "slowtwitch.northend.network",
#### username: "admin",
#### password: "yxnh93Ybbz2Nm8#mp28zCVv",
#### database: "slowtwitch"
#### )
#### end
# The query selects post_ids from gforum_Post where post_root_id is 0, meaning these posts are the topic starters (OPs).
# Execute the query and fetch the result
# result = execute_query("SELECT post_id FROM gforum_Post WHERE post_root_id = 0 ORDER BY post_id ASC")
result = execute_query("SELECT post_id FROM gforum_Post WHERE post_root_id = 0 ORDER BY post_id ASC")
# Convert the result set to an array of post_ids
parent_post_ids = result.map { |row| row['post_id'] }
# parent_post_count = parent_post_ids.count
batch_size = 10 # Set our batch size for number of posts to import in a single batch
#### current_post_batch = 0 # Set our current batch number. This tracks the current batch of posts being processed.
is_complete = false # Flag to indicate whether the import process is complete.
# Mutex to control access to shared resources
### mutex = Mutex.new # Mutex for MySQL2 operations -- disabled as this may not in fact be necessary - TBD.
sqlite_mutex = Mutex.new # Mutex for SQLite operations
# Run until all posts have been processed.
until is_complete
# Query in batches, create pool, wait for termination, do it again
# SELECT post_id FROM gforum_Post WHERE post_root_id = 0 ORDER BY post_id
current_post_batch_max = current_post_batch + batch_size
pool = Concurrent::FixedThreadPool.new(Concurrent.processor_count) # create a thread pool bounded by the processors available; adjust the number to see what works best
while current_post_batch < current_post_batch_max
post_id = parent_post_ids[current_post_batch]
# puts "QQ 11 -- GETTING NEXT BATCH ****************************************"
#### # Query in batches, create pool, wait for termination, do it again
#### current_post_batch_max = current_post_batch + batch_size
# Get the next batch of posts
current_post_batch = parent_post_ids.shift(batch_size)
break if current_post_batch.empty?
# Process each post in the current batch
current_post_batch.each do |post_id|
# puts "QQ 22 -- #{post_id}"
####### # Static pool size based on number of CPUs
# # pool = Concurrent::FixedThreadPool.new(Concurrent.processor_count) # Create a thread pool that is bounded by processors available
# # pool = Concurrent::FixedThreadPool.new(8) # Create a thread pool of 8 pool members
#### # Dynamically calculate the pool size based on system load to optimise performance
#### pool_size = calculate_dynamic_pool_size # Dynamically calculate what the pool size "ought" to be.
#### pool = Concurrent::FixedThreadPool.new(pool_size) # Create a thread pool with the calculated size
#### # Process each post in the current batch
#### while current_post_batch < current_post_batch_max
#### post_id = parent_post_ids[current_post_batch] # Fetch the post_id for the current post
#### # Check if the post has already been processed or is incomplete
#### post_status = fetch_post_status(post_id)
# Submit the import job for the current post_id to the thread pool
pool.post do
topic_import_job(post_id)
#### puts "PP 22 -- #{post_id}"
retries = 0
begin
####### mariadb_pool.with do |mysql_client|
# Initialise a new MariaDB / Mysql2 client inside of each thread
mysql_client = Mysql2::Client.new(
host: "slowtwitch.northend.network",
username: "admin",
password: "yxnh93Ybbz2Nm8#mp28zCVv",
database: "slowtwitch"
)
# Ensure the connection is active, otherwise reconnect
puts "PP 11 -- #{post_id} -- Checking MySQL connections status.."
mysql_client.ping || mysql_client = Mysql2::Client.new(
host: "slowtwitch.northend.network",
username: "admin",
password: "yxnh93Ybbz2Nm8#mp28zCVv",
database: "slowtwitch"
)
puts " FIRST Checking MySQL connection status..."
if mysql_client.query('SELECT 1').nil?
puts " !! MySQL connection is NOT VALID !!"
else
puts " MySQL connection is valid"
end
# Use connection pooling for PostgreSQL and synchronize access to shared resources
ActiveRecord::Base.connection_pool.with_connection do
# ActiveRecord::Base.connected_to(pool: 'CustomPool') do
## ActiveRecord::Base.connected_to(role: :writing) do
post_status = fetch_post_status(post_id)
if post_status.nil? || post_status == 0
puts "Starting import for post_id #{post_id}"
topic_import_job(post_id, sqlite_mutex, mysql_client) # Import topic and its replies
sqlite_mutex.synchronize do
mark_post_as_complete(post_id) # Mark as complete in SQLite table
end
else
puts "Skipping post_id #{post_id}, already processed."
end
end
####### end
rescue => e
puts "Error processing post ID #{post_id}: #{e.message}"
puts e.backtrace.join("\n") # Print the full stack trace
sqlite_mutex.synchronize do
mark_post_as_failed(post_id)
end
case e.message
when /MySQL client is not connected/, /This connection is in use by/
puts "Lost MySQL, retrying for post ID #{post_id}..."
# Add reconnection attempt again here... if it proves necessary?
retries += 1
if retries < 5
sleep(1)
retry
else
puts "Max retries reached for post ID #{post_id}"
end
when /could not obtain a connection from the pool/
puts "Connection pool exhausted, retrying for post ID #{post_id}..."
retries += 1
if retries < 5
sleep(1)
retry
else
puts "Max retries reached for post ID #{post_id}"
end
end
ensure
# Ensure the MariaDB connection is closed after processing
mysql_client.close if mysql_client
puts "** CLOSED MariaDB client"
end
end
current_post_batch += 1
break if current_post_batch >= parent_post_count
end
pool.shutdown
pool.wait_for_termination
if current_post_batch >= parent_post_count
is_complete = true
end
#### current_post_batch += 1 # Increment, moving to next post in the batch
#### break if current_post_batch >= parent_post_count
# Check if all posts have been processed
#### is_complete = true if current_post_batch >= parent_post_count
is_complete = parent_post_ids.empty?
end
# Wait for all jobs in the current batch to finish before proceeding
puts "PP 33 -- Ready for shutdown"
pool.shutdown # Initiate thread pool shutdown after all jobs submitted
puts "PP 44 -- Now wait for termination"
pool.wait_for_termination # Wait for all threads to finish exec
end
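# calculate_dynamic_pool_size (see the v0.39.x commits) is referenced by the
# commented-out sizing path above, but its body falls outside the hunks shown
# here. A hedged sketch of one plausible shape, assuming Linux /proc/loadavg
# -- illustrative, not the repository's actual implementation:
#
#   def calculate_dynamic_pool_size
#     load_avg = File.read('/proc/loadavg').split.first.to_f
#     cpus = Concurrent.processor_count
#     # leave headroom when the host is already busy; clamp to a sane range
#     [[cpus - load_avg.ceil, 2].max, cpus * 2].min
#   end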
def topic_import_job(post_id)
# Here is where you can import the entire topic
# Get post -- SELECT post_id, user_id_fk, forum_id_fk, post_root_id, post_subject, post_time, post_message, post_father_id, post_replies FROM gforum_Post WHERE post_id = post_id
# Check if it exists, create it if not
# Get children, create them -- SELECT post_id, user_id_fk, forum_id_fk, post_root_id, post_subject, post_time, post_message, post_father_id, post_replies FROM gforum_Post WHERE post_root_id = post_id
# This part needs to run synchronously to avoid race conditions
# # Method to ensure thread-safe updates to highest_processed_post_id
# def update_highest_processed_post_id_thread_safe(post_id)
# @highest_processed_mutex ||= Mutex.new
# @highest_processed_mutex.synchronize do
# if post_id > fetch_highest_processed_post_id
# update_highest_processed_post_id(post_id)
# end
# end
# end
# Method to import an entire topic, including its first post and all subsequent replies
def topic_import_job(post_id, sqlite_mutex, mysql_client)
##### def topic_import_job(post_id, sqlite_mutex)
puts "TIJ ZZ post_id #{post_id}"
##### mysql_client = Mysql2::Client.new(
##### host: "slowtwitch.northend.network",
##### username: "admin",
##### password: "yxnh93Ybbz2Nm8#mp28zCVv",
##### database: "slowtwitch"
##### )
puts " FIRST Checking MySQL connection status..."
if mysql_client.query('SELECT 1').nil?
puts " MySQL connection is not valid, TRY TO RECONNECT II"
mysql_client.ping
# || mysql_client = Mysql2::Client.new(
# host: "slowtwitch.northend.network",
# username: "admin",
# password: "yxnh93Ybbz2Nm8#mp28zCVv",
# database: "slowtwitch"
# )
else
puts " MySQL connection is valid"
end
puts "TIJ AA post_id #{post_id}"
# Fetch the post data for the given post_id (this is the first post in the topic)
row = execute_query_concurrent("SELECT post_id, user_id_fk, forum_id_fk, post_root_id, post_subject, post_time, post_message, post_father_id, post_replies FROM gforum_Post WHERE post_id = #{post_id}", mysql_client).first
puts "TIJ BB post_id #{post_id}"
# Early return if the post data is not found
return unless row
puts "TIJ CC post_id #{post_id}"
# Extract key values from the fetched row
post_id = row['post_id'].to_i
puts "Processing post_id #{row['post_id']} post_root_id #{row['post_root_id']} post_subject/title #{row['post_subject']} forum_id_fk/category_id #{row['forum_id_fk']}"
# Fetch the mapped Discourse user and category ID based on Gossamer data
discourse_user_id = fetch_user_id_mapping(row['user_id_fk'])
# Check to be certain user has not been deleted, etc.
if discourse_user_id.nil? || discourse_user_id == 0
puts "discourse_user_id is NIL/ZERO for post_id #{row['post_id']}"
discourse_former_user = User.find_by(username: 'Former_User')
discourse_user_id = discourse_former_user.id
puts "discourse_user_id is NOW Former_User id #{discourse_user_id} for post_id #{row['post_id']}"
end
# Fetch the mapped Discourse user and category ID based on Gossamer data
discourse_category_id = fetch_category_id_mapping(row['forum_id_fk'])
puts "discourse_user_id #{discourse_user_id} discourse_category_id #{discourse_category_id}"
return unless discourse_user_id && discourse_category_id
puts "TIJ DD post_id #{post_id}"
# Ensure the topic title is valid and generate a unique title if needed
title = ensure_valid_title(row['post_subject'])
unique_title = title
# Fetch the number of views the post has had
post_views = fetch_post_views(row['post_id'], mysql_client)
# Check if the topic has already been imported using the custom field 'original_gossamer_id'
unless TopicCustomField.exists?(name: 'original_gossamer_id', value: row['post_id'])
puts "TIJ EE post_id #{post_id}"
ActiveRecord::Base.transaction do
# Create the new topic in Discourse
begin
suffix = 1
topic_created = false
## ActiveRecord::Base.transaction do
while !topic_created
begin
puts "TIJ FF post_id #{post_id}"
puts "CREATE TOPIC unique_title #{unique_title} title #{title} discourse_user_id #{discourse_user_id} category_id #{discourse_category_id}"
topic = Topic.create!(
title: unique_title,
user_id: discourse_user_id,
created_at: Time.at(row['post_time']),
updated_at: Time.at(row['post_time']),
category_id: discourse_category_id,
views: post_views || 0,
posts_count: 0
)
topic.custom_fields['original_gossamer_id'] = row['post_id']
topic.save!
topic_created = true
# rescue ActiveRecord::RecordInvalid => e
rescue => e
if e.message.include?("Title has already been used")
unique_title = "#{title} (#{suffix})"
suffix += 1
else
raise e
end
# puts e.backtrace.join("\n") # Print the full stack trace
end
end
## end
# Workaround... take a copy of topic.id
current_topic_id = topic.id
sqlite_mutex.synchronize do
# Update the database with the last post time and user for the topic
update_db_topic_last_post_time(current_topic_id, Time.at(row['post_time']).to_i)
update_db_topic_last_post_user(current_topic_id, discourse_user_id)
# Increment the topic count for the user
update_db_user_topic_count(discourse_user_id, fetch_db_user_topic_count(discourse_user_id).to_i + 1)
end
# Sanitize and prepare the post message for Discourse
sanitized_post_message = sanitize_post_message(row['post_message'])
puts "CREATE TOPIC POST for current_topic_id #{current_topic_id} discourse_user_id #{discourse_user_id}"
post_number = 1
# Increment the post count for the topic
# This is a first post... post_number = fetch_db_topic_post_numbers(current_topic_id).to_i + 1
sqlite_mutex.synchronize do
update_db_topic_post_numbers(current_topic_id, post_number)
end
puts "TIJ GG post_id #{post_id}"
## ActiveRecord::Base.transaction do
# Create the initial post in the new topic
post = Post.create!(
topic_id: current_topic_id,
user_id: discourse_user_id,
raw: sanitized_post_message,
created_at: Time.at(row['post_time']),
updated_at: Time.at(row['post_time']),
reads: post_views || 0,
post_number: post_number
)
post.custom_fields['original_gossamer_id'] = row['post_id']
post.save!
## end
sqlite_mutex.synchronize do
# Increment the post count for the topic and user
update_db_topic_post_count(current_topic_id, fetch_db_topic_post_count(current_topic_id).to_i + 1)
update_db_user_post_count(discourse_user_id, fetch_db_user_post_count(discourse_user_id).to_i + 1)
end
puts "TIJ HH post_id #{post_id}"
## ActiveRecord::Base.transaction do
# Handle any attachments associated with the post
handle_post_attachments(row['post_id'], post, discourse_user_id, mysql_client)
## end
# Create URL mappings for the new topic
new_url = "https://new/t/#{topic.slug}/#{current_topic_id}"
sqlite_mutex.synchronize do
insert_url_mapping(row['post_id'], new_url, unique_title)
end
# Fetch and import all replies to this topic
replies = execute_query_concurrent("SELECT post_id, user_id_fk, post_message, post_time FROM gforum_Post WHERE post_root_id = #{post_id} ORDER BY post_time ASC", mysql_client)
# Import each reply sequentially
replies.each do |reply_row|
## begin
# Fetch the discourse user ID for the reply
reply_user_id = fetch_user_id_mapping(reply_row['user_id_fk'])
if reply_user_id.nil? || reply_user_id == 0
puts "reply_user_id is NIL/ZERO for reply post_id #{reply_row['post_id']}"
former_user = User.find_by(username: 'Former_User')
reply_user_id = former_user.id
puts "reply_user_id is NOW Former_User id #{reply_user_id} for reply post_id #{reply_row['post_id']}"
end
puts "TIJ II post_id #{post_id}"
# Sanitize and prepare the reply message for Discourse
sanitized_reply_message = sanitize_post_message(reply_row['post_message'])
puts "CREATE REPLY in current_topic_id #{current_topic_id} for reply post_id #{reply_row['post_id']}"
### def get_topic_id
### return topic.id
### end
# Increment the post count for the topic
post_number = fetch_db_topic_post_numbers(current_topic_id).to_i + 1
sqlite_mutex.synchronize do
update_db_topic_post_numbers(current_topic_id, post_number)
### update_db_topic_post_numbers(get_topic_id, post_number)
end
# Fetch the number of views the post has had
reply_post_views = fetch_post_views(reply_row['post_id'], mysql_client)
# crazy sanity check
if topic.nil?
puts "ERROR: Topic is nil for reply post_id #{reply_row['post_id']}, attempting to BYPASS anyway"
end
puts "TIJ JJ post_id #{post_id} reply post_id #{reply_row['post_id']} reply_post_views #{reply_post_views || 0} post_number #{post_number} current_topic_id #{current_topic_id} reply_post_views #{reply_post_views || 0}"
## ActiveRecord::Base.transaction do
# Create the reply post in the existing topic
post = Post.create!(
topic_id: current_topic_id,
user_id: reply_user_id,
raw: sanitized_reply_message,
created_at: Time.at(reply_row['post_time']),
updated_at: Time.at(reply_row['post_time']),
reads: reply_post_views || 0,
post_number: post_number
)
post.custom_fields['original_gossamer_id'] = reply_row['post_id']
post.save!
## end
puts "TIJ KK post_id #{post_id}"
# Increment the post count for the topic and user
sqlite_mutex.synchronize do
update_db_topic_post_count(current_topic_id, fetch_db_topic_post_count(current_topic_id).to_i + 1)
update_db_user_post_count(reply_user_id, fetch_db_user_post_count(reply_user_id).to_i + 1)
end
# Update last post time and user for the topic
if fetch_db_topic_last_post_time(current_topic_id).nil? || Time.at(reply_row['post_time']).to_i > fetch_db_topic_last_post_time(current_topic_id).to_i
sqlite_mutex.synchronize do
update_db_topic_last_post_time(current_topic_id, Time.at(reply_row['post_time']).to_i)
update_db_topic_last_post_user(current_topic_id, reply_user_id)
end
end
## ActiveRecord::Base.transaction do
# Handle any attachments associated with the reply
handle_post_attachments(reply_row['post_id'], post, reply_user_id, mysql_client)
## end
# # Update the highest processed post_id in the database (thread-safe)
# update_highest_processed_post_id_thread_safe(reply_row['post_id'])
# rescue ActiveRecord::RecordInvalid => e
## rescue => e
## puts "Error importing reply with post_id #{reply_row['post_id']}: #{e.message}"
## puts e.backtrace.join("\n") # Print the full stack trace
## end
end
# # After processing the entire topic, update the highest_processed_post_id to the current topic's post_id (thread-safe)
# update_highest_processed_post_id_thread_safe(post_id)
rescue ActiveRecord::RecordInvalid => e
puts "---> ERROR importing topic with post_id #{row['post_id']}: #{e.message}"
raise ActiveRecord::Rollback
end
end
else
puts "Topic for post_id #{row['post_id']} already exists, skipping creation."
end
puts " LAST Removing MySQL connection"
##### mysql_client.close # if mysql_client
end
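# The duplicate-title handling above distils to a retry-with-suffix pattern.
# A standalone sketch (method name and attrs hypothetical):
#
#   def create_topic_with_unique_title(base_title, attrs)
#     suffix = 0
#     begin
#       candidate = suffix.zero? ? base_title : "#{base_title} (#{suffix})"
#       Topic.create!(attrs.merge(title: candidate))
#     rescue ActiveRecord::RecordInvalid => e
#       raise unless e.message.include?("Title has already been used")
#       suffix += 1
#       retry
#     end
#   end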
#-------------------------------------------------------------------
########## THREADING END --------------------------------------------
# Import topics and posts from Gossamer Forums to Discourse
def import_topics_and_posts_with_attachments
@@ -993,7 +1549,7 @@ class GossamerForumsImporter < ImportScripts::Base
# Attachment example: highest_processed_post_id = 1359862
# Execute the query to get all posts ordered by post_id
execute_query("SELECT post_id, user_id_fk, forum_id_fk, post_root_id, post_subject, post_time, post_message, post_father_id, post_replies FROM gforum_Post ORDER BY post_id").each do |row|
execute_query("SELECT post_id, user_id_fk, forum_id_fk, post_root_id, post_subject, post_time, post_message, post_father_id, post_likes, post_replies FROM gforum_Post ORDER BY post_id").each do |row|
post_id = row['post_id'].to_i
# Skip posts that have already been processed
@@ -1018,7 +1574,7 @@ class GossamerForumsImporter < ImportScripts::Base
unique_title = title
# Confirm the number of views the post has had
post_views = fetch_post_views(row['post_id'])
post_views = fetch_post_views(row['post_id'], @mysql_client)
# Skip if the topic already exists
unless TopicCustomField.exists?(name: 'original_gossamer_id', value: row['post_id'])
@@ -1128,7 +1684,7 @@ class GossamerForumsImporter < ImportScripts::Base
puts "#3"
# Confirm the number of views the post has had
post_views = fetch_post_views(row['post_id'])
post_views = fetch_post_views(row['post_id'], @mysql_client)
# Find the root topic for the post
root_topic_field = TopicCustomField.find_by(name: 'original_gossamer_id', value: row['post_root_id'])
@@ -1261,9 +1817,10 @@ class GossamerForumsImporter < ImportScripts::Base
# Determine the new Trust Level based on post_count
user = User.find(user_id)
new_trust_level = case count
when 0..2 then 1 # basic user
when 3..50 then 2 # member
else 3 # regular or above when 51..100
when 0..29 then 1 # basic user
else 2 # member, regular reserved for now.
# when 3..50 then 2 # member
# else 3 # regular or above when 51..100
end
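# Worked examples of the mapping above: count 12 -> trust level 1 (basic
# user); count 30 or more -> trust level 2 (member). Level 3 (regular)
# is reserved for now.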
# Fetch the current user and check if Trust Level needs updating
@@ -1429,15 +1986,21 @@ class GossamerForumsImporter < ImportScripts::Base
# add_former_user
# import_users
# export_username_mapping_to_csv("gossamer-migration-username-mapping#{timestamp}")
#
# generate_user_id_mapping
# export_username_mapping_to_csv("/bitnami/discourse/sqlite/gossamer-migration-username-mapping#{timestamp}")
set_user_bio_images
# import_categories
#
import_topics_and_posts_with_attachments
####### import_topics_and_posts_with_attachments
### threaded_topic_import
update_topic_stats
update_user_stats
export_url_mapping_to_csv("gossamer-migration-url-mapping#{timestamp}")
create_nginx_rewrite_rules("gossamer-redirects#{timestamp}.conf")
export_url_mapping_to_csv("/bitnami/discourse/sqlite/gossamer-migration-url-mapping#{timestamp}")
create_nginx_rewrite_rules("/bitnami/discourse/sqlite/gossamer-redirects#{timestamp}.conf")
import_personal_messages

gosss.rb (new file, 658 lines)
View File

@@ -0,0 +1,658 @@
# gossamer threads migration-import code
# v0.12
require 'mysql2'
require 'open-uri'
require 'net/http'
require 'tempfile'
require 'sqlite3'
require 'digest'
require 'fileutils'
require 'csv'
require 'time'
require File.expand_path("../../../config/environment", __FILE__)
require_relative 'base'
class GossamerForumsImporter < ImportScripts::Base
def initialize
super
begin
# Initialize MySQL client to connect to Gossamer Forums database
@mysql_client = Mysql2::Client.new(
host: "slot.northend.network",
username: "admin",
password: "yxnh93Ybbz2Nm8#mp28zCVv",
database: "slot"
)
rescue Mysql2::Error => e
puts "Error connecting to MySQL: #{e.message}"
exit 1
end
# # Create a mapping of old Gossamer user IDs to new Discourse user IDs
# @user_id_map = {}
initialize_sqlite_id_name_url_db
end
def initialize_sqlite_id_name_url_db
@db = SQLite3::Database.new 'id_name_url_map.db'
@db.execute <<-SQL
CREATE TABLE IF NOT EXISTS user_id_map (
old_user_id INTEGER PRIMARY KEY,
new_user_id INTEGER
);
SQL
@db.execute <<-SQL
CREATE TABLE IF NOT EXISTS category_id_map (
old_category_id INTEGER PRIMARY KEY,
new_category_id INTEGER
);
SQL
@db.execute <<-SQL
CREATE TABLE IF NOT EXISTS username_map (
id INTEGER PRIMARY KEY,
old_username TEXT,
new_username TEXT,
email TEXT,
real_name TEXT
);
SQL
@db.execute <<-SQL
CREATE TABLE IF NOT EXISTS url_map (
old_post_id INTEGER PRIMARY KEY,
new_url TEXT,
title TEXT
-- created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
SQL
end
def insert_user_id_mapping(old_user_id, new_user_id)
@db.execute "INSERT OR REPLACE INTO user_id_map (old_user_id, new_user_id) VALUES (?, ?)", old_user_id, new_user_id
end
def fetch_user_id_mapping(old_user_id)
@db.get_first_value "SELECT new_user_id FROM user_id_map WHERE old_user_id = ?", old_user_id
end
def insert_category_id_mapping(old_category_id, new_category_id)
@db.execute "INSERT OR REPLACE INTO category_id_map (old_category_id, new_category_id) VALUES (?, ?)", old_category_id, new_category_id
end
def fetch_category_id_mapping(old_category_id)
@db.get_first_value "SELECT new_category_id FROM category_id_map WHERE old_category_id = ?", old_category_id
end
def insert_username_mapping(old_username, new_username, email, real_name)
@db.execute "INSERT INTO username_map (old_username, new_username, email, real_name) VALUES (?, ?, ?, ?)", old_username, new_username, email, real_name
end
# Define a method to export the username mapping table to a CSV file
def export_username_mapping_to_csv(filename)
CSV.open(filename, 'w') do |csv|
# Add headers
csv << ['Old Username', 'New Username', 'Email', 'Full Name']
# Fetch data from the database
@db.execute("SELECT old_username, new_username, email, real_name FROM username_map") do |row|
csv << row
end
end
puts "Exported changed username mappings to #{filename}"
end
# Insert a URL mapping into the SQLite database
def insert_url_mapping(old_post_id, new_url, title)
@db.execute "INSERT INTO url_map (old_post_id, new_url, title) VALUES (?, ?, ?)", [old_post_id, new_url, title]
end
# Export the URL mappings to a CSV file
def export_url_mapping_to_csv(filename)
CSV.open(filename, "w") do |csv|
# Add headers
csv << ["Old Post ID", "New URL", "Title"]
@db.execute("SELECT old_post_id, new_url, title FROM url_map") do |row|
csv << row
end
end
puts "Exported URL mappings to #{filename}"
end
# Method to create Nginx rewrite rules file
def create_nginx_rewrite_rules(filename)
File.open(filename, "w") do |file|
@db.execute("SELECT old_post_id, new_url FROM url_map") do |row|
old_post_id, new_url = row
file.puts "rewrite ^/forum/.*P#{old_post_id}/$ #{new_url} permanent;"
end
end
end
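# Example of an emitted rule (values hypothetical):
#   rewrite ^/forum/.*P1359862/$ https://new/t/some-topic/42 permanent;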
# Execute an SQL query on the Gossamer Forums database
def execute_query(query)
@mysql_client.query(query, as: :hash)
end
# Sanitize the username to meet Discourse's requirements
def sanitize_username(original_username, email, real_name)
# original_username = username
sanitized_username = original_username.gsub(/[^a-zA-Z0-9._-]/, '_')
sanitized_username = "#{sanitized_username}." if sanitized_username.length < 2 # Allow two-character usernames
sanitized_username = sanitized_username[0, 20] if sanitized_username.length > 20
firststep_sanitized = sanitized_username
existing_user = User.find_by(username: sanitized_username)
if existing_user
if existing_user.email.downcase == email.downcase && existing_user.name == real_name
return sanitized_username
else
counter = 1
while User.exists?(username: sanitized_username)
sanitized_username = "#{firststep_sanitized}_#{counter}"
sanitized_username = sanitized_username[0, 20] if sanitized_username.length > 20
counter += 1
end
end
end
if original_username != sanitized_username
# The Discourse username is not the same as the Gossamer Forums username
puts "Sanitized username: '#{original_username}' --> '#{sanitized_username}'"
insert_username_mapping(original_username, sanitized_username, email, real_name)
# else
# puts "UNsanitized username: '#{original_username}' --> '#{sanitized_username}'"
end
sanitized_username
end
# Sanitize email to replace restricted domains
def sanitize_email(email)
restricted_domains = ['mailinator.com', 'example.com'] # Add more restricted domains as needed
domain = email.split('@').last
if restricted_domains.include?(domain)
sanitized_email = email.gsub(domain, 'example.org') # Change to a permissible domain
puts "Sanitized email: '#{email}' --> '#{sanitized_email}'"
return sanitized_email
end
email
end
# Helper method to download an image from a URL
def download_image(url)
begin
URI.open(url).read
rescue OpenURI::HTTPError => e
puts "Failed to download image from #{url}: #{e.message}"
nil
rescue URI::InvalidURIError => e
puts "Failed to handle invalid URL/URI for #{url}: #{e.message}"
nil
end
end
def upload_image(user, file, filename, gossamer_url)
begin
upload = Upload.create!(
user_id: user.id,
original_filename: filename,
filesize: file.size,
# filesize: File.size(file.path),
# content_type: `file --brief --mime-type #{file.path}`.strip,
# sha1: Digest::SHA1.file(file.path).hexdigest,
# origin: 'user_avatar',
# retain_hours: nil,
url: gossamer_url
)
# Error -- non-existent method upload.ensure_consistency!
# Move the file to the correct location
# FileUtils.mv(file.path, upload.path)
upload.save!
upload
rescue => e
puts "Failed to upload image #{filename} for user #{user.username}: #{e.message}"
nil
end
end
# def download_file(url)
# require 'open-uri'
# begin
# file = Tempfile.new
# file.binmode
# file.write(URI.open(url).read)
# file.rewind
# file
# rescue => e
# puts "Failed to download file from #{url}: #{e.message}"
# nil
# end
# end
# Helper method to upload an image to Discourse
# def upload_image(user, image_data, filename)
# return if image_data.nil?
#
# upload = Upload.create_for(user.id, File.open(image_data.path), filename, 'image/jpeg')
# if upload.nil? || !upload.persisted?
# puts "Failed to upload image for user #{user.username}"
# return
# end
#
# upload
# end
# Import users from Gossamer Forums to Discourse
def import_users
puts "Importing users..."
users = []
# Fetch all users from Gossamer Forums
execute_query("SELECT * FROM gforum_User").each do |row|
users << {
id: row['user_id'],
username: sanitize_username(row['user_username'], row['user_email'], row['user_real_name']),
email: row['user_email'],
created_at: Time.at(row['user_registered']),
updated_at: Time.at(row['user_last_seen']),
name: row['user_real_name'],
title: row['user_title'],
bio_raw: row['user_about'] || "",
website: row['user_homepage'],
location: row['user_location'],
custom_fields: {
md5_password: row['user_password'],
original_username: row['user_username'],
original_gossamer_id: row['user_id']
}
}
end
# Create or update users in Discourse
create_users(users) do |user|
# insert_user_id_mapping(user[:id], user.id)
user
end
# For each user, add user ID mapping to SQLite now that we know what the Discourse user ID is, ... and append user bio and import user files
users.each do |user|
# Look up the Discourse user created above so we can record the ID mapping
discourse_username = user[:username]
discourse_user = User.find_by(username: discourse_username)
if discourse_user.nil?
puts "User #{user[:username]} --> #{discourse_username} not found in Discourse. Skipping file import."
next
end
# Store the user ID mapping
# @user_id_map[user[:id]] = discourse_user.id
puts "for insert_user_id_mapping: user[:id] #{user[:id]} discourse_user.id #{discourse_user.id}"
insert_user_id_mapping(user[:id], discourse_user.id)
# Ensure user profile exists and bio_raw is a string
discourse_user.user_profile ||= UserProfile.new(user_id: discourse_user.id)
discourse_user.user_profile.bio_raw ||= ""
      # Set the bio if none exists yet; otherwise append so existing content is not clobbered
if discourse_user.user_profile.bio_raw.empty?
discourse_user.user_profile.bio_raw = user[:bio_raw]
else
discourse_user.user_profile.bio_raw += "\n\n" + user[:bio_raw]
end
# Ensure the bio does not exceed 3000 characters
if discourse_user.user_profile.bio_raw.length > 3000
puts "Warning: About Me for user #{discourse_user.username} (ID: #{discourse_user.id}) exceeds 3000 characters. Truncating."
discourse_user.user_profile.bio_raw = discourse_user.user_profile.bio_raw[0, 3000]
end
discourse_user.user_profile.save!
# Import user files
import_user_files(discourse_user)
end
end
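  # Example (hypothetical Gossamer id): once import_users has run, the SQLite mapping
  # resolves original user ids to their Discourse counterparts, e.g.
  #   discourse_id = fetch_user_id_mapping(1234)
  #   puts User.find(discourse_id).username if discourse_id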
# Import user files (profile images) from Gossamer Forums to Discourse
def import_user_files(user)
print "\rImporting files for user #{user.username}..."
    original_gossamer_id = user.custom_fields['original_gossamer_id']
    # custom_fields values may come back as Integer or String, so normalize before checking
    if original_gossamer_id.to_s.empty?
      puts "User #{user.username} does not have a valid original_gossamer_id. Skipping file import."
      return
    end
puts "Original Gossamer ID for user #{user.username}: #{original_gossamer_id}"
images_imported = 0
execute_query("SELECT * FROM gforum_User_Files WHERE ForeignColKey = #{original_gossamer_id}").each do |file|
file_url = "https://forum.slot.com/images/users/images/#{file['ID'] % 10}/#{file['ID']}-#{file['File_Name']}"
puts "User #{user.username} User ID: #{user.id} original_gossamer_id: #{original_gossamer_id} file_url: #{file_url}"
      # Only user_imageN fields that are JPEG or PNG images are imported
      next unless file['ForeignColName'] =~ /^user_image\d+$/
      next unless ['image/jpeg', 'image/png'].include?(file['File_MimeType'])
      image_data = download_image(file_url)
      next if image_data.nil?
temp_file = Tempfile.new(['user_image', File.extname(file['File_Name'])])
temp_file.binmode
temp_file.write(image_data)
temp_file.rewind
      # The first image becomes the avatar, profile header and user card background
      if images_imported == 0
        upload = upload_image(user, temp_file, file['File_Name'], file_url)
        if upload.nil?
          # Clean up the tempfile before skipping, so failed uploads do not leak files
          temp_file.close
          temp_file.unlink
          next
        end
        user.user_avatar = UserAvatar.create!(user_id: user.id, custom_upload_id: upload.id)
        user.save!
        # Set the Profile Header and the User Card Background to the same upload
        UserProfile.find_by(user_id: user.id).update!(profile_background_upload_id: upload.id,
                                                      card_background_upload_id: upload.id)
        images_imported += 1
      end
      # Every image is also appended to the user's bio
user.user_profile.bio_raw ||= ""
user.user_profile.bio_raw += "\n\n![#{file['File_Name']}](#{file_url})"
user.user_profile.save!
temp_file.close
temp_file.unlink
end
print "Importing files for user #{user.username}... Done.\n"
end
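  # The file URL layout mirrors Gossamer's on-disk sharding: files are bucketed by
  # ID % 10, so (hypothetically) file ID 1234 named "me.jpg" resolves to
  #   https://forum.slot.com/images/users/images/4/1234-me.jpg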
# Import categories from Gossamer Forums to Discourse
def import_categories
puts "Importing categories (forums)..."
execute_query("SELECT * FROM gforum_Forum").each do |row|
# Only create category if it does not exist
unless CategoryCustomField.exists?(name: 'original_gossamer_id', value: row['forum_id'])
category_name = row['forum_name']
category_description = row['forum_desc'] || "No description provided"
puts "id #{row['forum_id']} name #{category_name} description #{category_description}"
# Create category in Discourse
category = create_category(
{
# id: row['forum_id'] + 10,
name: category_name,
description: category_description,
created_at: row['forum_last'] ? Time.at(row['forum_last']) : Time.now,
updated_at: row['forum_last'] ? Time.at(row['forum_last']) : Time.now
},
row['forum_id'] # import_id argument
)
# # Map Gossamer forum ID to Discourse category ID for future reference
# @forum_id_map[row['forum_id']] = category.id
# category.custom_fields.create!(name: 'original_gossamer_id', value: row['forum_id'])
category.custom_fields['original_gossamer_id'] = row['forum_id']
category.save!
        # Store the category ID mapping
        puts "for insert_category_id_mapping: category.id #{category.id} row['forum_id'] #{row['forum_id']}"
        insert_category_id_mapping(row['forum_id'], category.id)
end
end
puts "Importing categories... Done."
end
# Helper function to ensure title meets the minimum length requirement
  def ensure_valid_title(title, min_length = 5)
    title = title.to_s.strip # Guard against nil or whitespace-only subjects
    title += "." * (min_length - title.length) if title.length < min_length # Pad with dots to meet the minimum
    title
  end
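  # Example: ensure_valid_title("Hi") => "Hi...", while titles already at least
  # five characters long pass through unchanged.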
# Import topics and posts from Gossamer Forums to Discourse
def import_topics_and_posts
puts "Importing topics and posts..."
# Execute the query to get all posts ordered by post_id
execute_query("SELECT * FROM gforum_Post ORDER BY post_id").each do |row|
puts "post_id #{row['post_id']} post_root_id #{row['post_root_id']} post_subject/title #{row['post_subject']} forum_id_fk/category_id #{row['forum_id_fk']}"
# discourse_user_id = @user_id_map[row['user_id_fk']]
discourse_user_id = fetch_user_id_mapping(row['user_id_fk'])
discourse_category_id = fetch_category_id_mapping(row['forum_id_fk'])
puts "discourse_user_id #{discourse_user_id} discourse_category_id #{discourse_category_id}"
next unless discourse_user_id && discourse_category_id
      if row['post_root_id'] == 0
        # Root post: create a new topic
# Ensure the title is valid
title = ensure_valid_title(row['post_subject'])
# Skip if the topic already exists
unless TopicCustomField.exists?(name: 'original_gossamer_id', value: row['post_id'])
# Create the topic
          begin
            puts "CREATE TOPIC title #{title} discourse_user_id #{discourse_user_id} category_id #{discourse_category_id}"
topic = Topic.create!(
title: title,
user_id: discourse_user_id,
created_at: Time.at(row['post_time']),
updated_at: Time.at(row['post_latest_reply']),
category_id: discourse_category_id
)
topic.custom_fields['original_gossamer_id'] = row['post_id']
topic.save!
# Create the initial post in the topic
puts "CREATE POST topic.id #{topic.id} discourse_user_id #{discourse_user_id}"
sanitized_post_message = row['post_message']&.tr("\0", '') || ""
post = Post.create!(
topic_id: topic.id,
user_id: discourse_user_id,
# raw: import_attachments(row['post_message'], row['post_id']),
# raw: row['post_message'] || "",
raw: sanitized_post_message,
created_at: Time.at(row['post_time']),
updated_at: Time.at(row['post_latest_reply'])
)
post.custom_fields['original_gossamer_id'] = row['post_id']
post.save!
# Create URL mappings
# old_url = "https://old/forum/#{row['forum_name']}/topics/#{row['post_id']}"
new_url = "https://new/t/#{topic.slug}/#{topic.id}"
insert_url_mapping(row['post_id'], new_url, title)
rescue ActiveRecord::RecordInvalid => e
puts "Error importing topic with post_id #{row['post_id']}: #{e.message}"
end
end
      else
        # Reply: find the root topic it belongs to
        root_topic_field = TopicCustomField.find_by(name: 'original_gossamer_id', value: row['post_root_id'])
if root_topic_field
topic_id = root_topic_field.topic_id
# Find the parent post for the reply
parent_post_field = PostCustomField.find_by(name: 'original_gossamer_id', value: row['post_father_id'])
reply_to_post_number = parent_post_field ? Post.find(parent_post_field.post_id).post_number : nil
# Create the post in the existing topic
          begin
sanitized_post_message = row['post_message']&.tr("\0", '') || ""
post = Post.create!(
topic_id: topic_id,
user_id: discourse_user_id,
# raw: import_attachments(row['post_message'], row['post_id']),
# raw: row['post_message'] || "",
raw: sanitized_post_message,
created_at: Time.at(row['post_time']),
updated_at: Time.at(row['post_latest_reply']),
reply_to_post_number: reply_to_post_number
)
post.custom_fields['original_gossamer_id'] = row['post_id']
post.save!
rescue ActiveRecord::RecordInvalid => e
puts "Error importing post with post_id #{row['post_id']}: #{e.message}"
end
else
puts "Warning: Root topic not found for post_id #{row['post_id']} with post_root_id #{row['post_root_id']}"
end
end
end
end
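  # Optional follow-up pass (a sketch, not part of the original flow): posts created
  # directly via Post.create! can be rebaked afterwards so Discourse regenerates
  # their cooked HTML, e.g.
  #   Post.where(id: PostCustomField.where(name: 'original_gossamer_id').select(:post_id))
  #       .find_each(&:rebake!)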
# Import personal messages from gforum_Message table (both inbox and sent messages)
def import_personal_messages
puts "Importing personal (inbox and sendmail) messages..."
execute_query("SELECT * FROM gforum_Message").each do |row|
from_user_id = fetch_user_id_mapping(row['from_user_id_fk'])
to_user_id = fetch_user_id_mapping(row['to_user_id_fk'])
next unless from_user_id && to_user_id
# Skip if the message already exists
unless TopicCustomField.exists?(name: 'original_gossamer_msg_id', value: row['msg_id'])
# Sanitize the message, ensuring we have an empty string or the content without any \0
sanitized_message = row['msg_body']&.tr("\0", '') || ""
# Set default message body if the sanitized message is blank
sanitized_message = " " if sanitized_message.strip.empty?
# # If we do not change the "min personal message post length" to 1, we need this.
# sanitized_message = sanitized_message.ljust(10, ' ') if sanitized_message.length < 10
# Check and set a default title if the original title is nil or empty
title = row['msg_subject']&.strip
title = "<no subject>" if title.nil? || title.empty?
puts "IMPORTING title #{row['msg_subject']} user_id #{from_user_id} to_user_id #{to_user_id}"
# Create a private message topic in Discourse
topic = Topic.create!(
title: row['msg_subject'],
user_id: from_user_id,
archetype: Archetype.private_message,
created_at: Time.at(row['msg_time']),
updated_at: Time.at(row['msg_time'])
)
topic.custom_fields['original_gossamer_msg_id'] = row['msg_id']
topic.save!
# Create the message as a post in the private topic
post = Post.create!(
topic_id: topic.id,
user_id: from_user_id,
# raw: row['msg_body'],
raw: sanitized_message,
created_at: Time.at(row['msg_time']),
updated_at: Time.at(row['msg_time'])
)
post.custom_fields['original_gossamer_msg_id'] = row['msg_id']
post.save!
        # Grant both sender and recipient access to the private topic; Topic.create!
        # does not add participants automatically when PostCreator is bypassed
        topic.topic_allowed_users.create!(user_id: from_user_id)
        topic.topic_allowed_users.create!(user_id: to_user_id)
end
end
end
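  # A quick way to sanity-check an imported conversation (hypothetical message id):
  #   tcf = TopicCustomField.find_by(name: 'original_gossamer_msg_id', value: 123)
  #   Topic.find(tcf.topic_id).allowed_users.pluck(:username) if tcf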
# Import attachments for a post
def import_post_attachments(post_message, post_id)
# Fetch attachments related to the post
attachments = execute_query("SELECT * FROM gforum_PostAttachment WHERE post_id_fk = #{post_id}")
attachments.each do |attachment|
# Append attachment links to the post message
file_url = "https://forum.slot.com/images/posts/attachments/#{attachment['ID'] % 10}/#{attachment['ID']}-#{attachment['File_Name']}"
post_message += "\n\n![#{attachment['File_Name']}](#{file_url})"
end
    post_message
end
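  # Wiring sketch: the topic/post import above currently keeps the original raw text;
  # to inline attachments, the raw: lines there could instead call this helper, e.g.
  #   raw: import_post_attachments(sanitized_post_message, row['post_id'])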
# Main method to perform the import
def perform_import
    # Disable Discourse's rate limiting so bulk creation is not throttled
    RateLimiter.disable
# Set our unique timestamp for this migration run
timestamp = Time.now.strftime("-%y%m%d%H%M%S")
puts "Starting Gossamer Forums import... #{timestamp}"
import_users
export_username_mapping_to_csv("gossamer-migration-username-mapping#{timestamp}")
import_categories
import_topics_and_posts
export_url_mapping_to_csv("gossamer-migration-url-mapping#{timestamp}")
create_nginx_rewrite_rules("gossamer-redirects.conf")
import_personal_messages
# import_attachments
puts "Gossamer Forums import complete! #{timestamp}"
end
end
GossamerForumsImporter.new.perform_import
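# Usage sketch (paths and filename are assumptions): run from the Discourse app
# directory so the Rails environment and models are loaded, e.g.
#   cd /var/www/discourse && sudo -u discourse bundle exec ruby gossamer_import.rb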