From 0e275ba88654ca4df3befe2bf197ed07eb448a4d Mon Sep 17 00:00:00 2001
From: Ahrimdon
Date: Sat, 2 Mar 2024 21:05:13 -0500
Subject: [PATCH] Organize directory, add build script

---
 build.py                                      |  21 +
 .../db_export_connectionhistory.py            | 162 +++---
 .../db_export_connectionhistory_json.py       |  64 +--
 .../db_export_messages.py                     | 154 ++---
 .../db_export_messages_json.py                |  64 +--
 .../Export EFAlias}/db_export_ip.py           |  90 +--
 .../Export EFAlias}/db_export_ip_json.py      |  56 +-
 .../db_export_auditlog.py                     | 184 +++---
 .../db_export_auditlog_json.py                |  66 +--
 .../Export EFClients}/db_export_clients.py    | 170 +++---
 .../db_export_clients_json.py                 |  68 +--
 .../Export EFMaps}/db_export_maps.py          |  98 ++--
 .../Export EFMaps}/db_export_maps_json.py     |  68 +--
 .../Export EFMeta}/db_export_metadata.py      | 166 +++---
 .../Export EFMeta}/db_export_metadata_json.py |  62 +-
 .../db_export_penalties.py                    | 264 ++++-----
 .../db_export_penalties_json.py               |  70 +--
 .../db_export_penaltyidentifiers.py           | 140 ++---
 .../db_export_penaltyidentifiers_json.py      |  50 +-
 .../Export EFServers}/db_export_servers.py    | 132 ++---
 .../db_export_servers_json.py                 |  64 +--
 .../db_export_inboxmessages.py                | 110 ++--
 .../db_export_inboxmessages_json.py           |  66 +--
 export_db_json.py => src/export_db_json.py    | 528 +++++++++---------
 24 files changed, 1469 insertions(+), 1448 deletions(-)
 create mode 100644 build.py
 rename {Export ClientConnectionHistory => src/Export ClientConnectionHistory}/db_export_connectionhistory.py (96%)
 rename {Export ClientConnectionHistory => src/Export ClientConnectionHistory}/db_export_connectionhistory_json.py (96%)
 rename {Export ClientMessages => src/Export ClientMessages}/db_export_messages.py (96%)
 rename {Export ClientMessages => src/Export ClientMessages}/db_export_messages_json.py (96%)
 rename {Export EFAlias => src/Export EFAlias}/db_export_ip.py (96%)
 rename {Export EFAlias => src/Export EFAlias}/db_export_ip_json.py (96%)
 rename {Export EFChangeHistory => src/Export EFChangeHistory}/db_export_auditlog.py (97%)
 rename {Export EFChangeHistory => src/Export EFChangeHistory}/db_export_auditlog_json.py (96%)
 rename {Export EFClients => src/Export EFClients}/db_export_clients.py (96%)
 rename {Export EFClients => src/Export EFClients}/db_export_clients_json.py (96%)
 rename {Export EFMaps => src/Export EFMaps}/db_export_maps.py (96%)
 rename {Export EFMaps => src/Export EFMaps}/db_export_maps_json.py (95%)
 rename {Export EFMeta => src/Export EFMeta}/db_export_metadata.py (96%)
 rename {Export EFMeta => src/Export EFMeta}/db_export_metadata_json.py (95%)
 rename {Export EFPenalties => src/Export EFPenalties}/db_export_penalties.py (96%)
 rename {Export EFPenalties => src/Export EFPenalties}/db_export_penalties_json.py (96%)
 rename {Export EFPenaltyIdentifiers => src/Export EFPenaltyIdentifiers}/db_export_penaltyidentifiers.py (95%)
 rename {Export EFPenaltyIdentifiers => src/Export EFPenaltyIdentifiers}/db_export_penaltyidentifiers_json.py (96%)
 rename {Export EFServers => src/Export EFServers}/db_export_servers.py (96%)
 rename {Export EFServers => src/Export EFServers}/db_export_servers_json.py (96%)
 rename {Export InboxMessages => src/Export InboxMessages}/db_export_inboxmessages.py (97%)
 rename {Export InboxMessages => src/Export InboxMessages}/db_export_inboxmessages_json.py (96%)
 rename export_db_json.py => src/export_db_json.py (96%)

diff --git a/build.py b/build.py
new file mode 100644
index 0000000..03c54ef
--- /dev/null
+++ b/build.py
@@ -0,0 +1,21 @@
+import os
+import PyInstaller.__main__
+from distutils.sysconfig import get_python_lib
+
+site_packages_path = get_python_lib()
+
+NAME = "IW4MAdmin_DB_Parser"
+SCRIPT = "combine_db.py"
+
+PyInstaller.__main__.run([
+    "{}".format(SCRIPT),
+    '--name', f"{NAME}",
+    "--noconfirm",
+    "--onefile",
+    "--windowed",
+])
+
+# create symbolic hardlink to main directory
+if os.path.exists("combine_db.exe"):
+    os.remove("combine_db.exe")
+os.link('dist/IW4MAdmin_DB_Parser.exe', 'IW4MAdmin_DB_Parser.exe')
\ No newline at end of file
diff --git a/Export ClientConnectionHistory/db_export_connectionhistory.py b/src/Export ClientConnectionHistory/db_export_connectionhistory.py
similarity index 96%
rename from Export ClientConnectionHistory/db_export_connectionhistory.py
rename to src/Export ClientConnectionHistory/db_export_connectionhistory.py
index 3062843..51857e1 100644
--- a/Export ClientConnectionHistory/db_export_connectionhistory.py
+++ b/src/Export ClientConnectionHistory/db_export_connectionhistory.py
@@ -1,82 +1,82 @@
-import sqlite3
-
-# Connect to the existing database
-existing_conn = sqlite3.connect("Database.db")
-existing_cur = existing_conn.cursor()
-
-# Connect to the new database
-new_conn = sqlite3.connect("Plutonium_Servers.db")
-new_cur = new_conn.cursor()
-
-# Create the modified ClientConnectionHistory table in the new database
-new_cur.execute("""
-CREATE TABLE "ClientConnectionHistory" (
-    "ConnectionId" INTEGER NOT NULL,
-    "Client" TEXT NOT NULL,
-    "ConnectionTime" TEXT NOT NULL,
-    "ConnectionType" TEXT NOT NULL,
-    "Server" TEXT,
-    CONSTRAINT "PK_ClientConnectionHistory" PRIMARY KEY("ConnectionId" AUTOINCREMENT)
-)
-""")
-
-# Fetch data from existing EFClientConnectionHistory
-existing_cur.execute("""
-SELECT
-    EFClientConnectionHistory.ClientConnectionId,
-    EFClientConnectionHistory.ClientId,
-    EFClientConnectionHistory.CreatedDateTime,
-    EFClientConnectionHistory.ConnectionType,
-    EFClientConnectionHistory.ServerId
-FROM
-    EFClientConnectionHistory
-""")
-rows = existing_cur.fetchall()
-
-for row in rows:
-    client_id = row[1]
-    server_id = row[4]
-
-    # Retrieve client name
-    existing_cur.execute("""
-    SELECT
-        EFAlias.Name
-    FROM
-        EFClients
-    JOIN EFAlias ON EFClients.CurrentAliasId = EFAlias.AliasId
-    WHERE
-        EFClients.ClientId = ?
-    """, (client_id,))
-    client_name = existing_cur.fetchone()
-    if client_name:
-        client_name = client_name[0].replace('^7', '')
-    else:
-        client_name = 'Unknown'
-
-    # Retrieve server hostname
-    existing_cur.execute("""
-    SELECT
-        EFServers.HostName
-    FROM
-        EFServers
-    WHERE
-        EFServers.ServerId = ?
- """, (server_id,)) - server_hostname = existing_cur.fetchone() - if server_hostname: - server_hostname = server_hostname[0] - else: - server_hostname = 'Unknown' - - # Map ConnectionType values to their corresponding text - connection_type_map = {0: "Connect", 1: "Disconnect"} - connection_type = connection_type_map[row[3]] - - # Insert the modified row into the new ClientConnectionHistory table - new_row = (row[0], client_name, row[2], connection_type, server_hostname) - new_cur.execute("INSERT INTO ClientConnectionHistory (ConnectionId, Client, ConnectionTime, ConnectionType, Server) VALUES (?, ?, ?, ?, ?)", new_row) - -# Commit the changes and close the connections -new_conn.commit() -existing_conn.close() +import sqlite3 + +# Connect to the existing database +existing_conn = sqlite3.connect("Database.db") +existing_cur = existing_conn.cursor() + +# Connect to the new database +new_conn = sqlite3.connect("Plutonium_Servers.db") +new_cur = new_conn.cursor() + +# Create the modified ClientConnectionHistory table in the new database +new_cur.execute(""" +CREATE TABLE "ClientConnectionHistory" ( + "ConnectionId" INTEGER NOT NULL, + "Client" TEXT NOT NULL, + "ConnectionTime" TEXT NOT NULL, + "ConnectionType" TEXT NOT NULL, + "Server" TEXT, + CONSTRAINT "PK_ClientConnectionHistory" PRIMARY KEY("ConnectionId" AUTOINCREMENT) +) +""") + +# Fetch data from existing EFClientConnectionHistory +existing_cur.execute(""" +SELECT + EFClientConnectionHistory.ClientConnectionId, + EFClientConnectionHistory.ClientId, + EFClientConnectionHistory.CreatedDateTime, + EFClientConnectionHistory.ConnectionType, + EFClientConnectionHistory.ServerId +FROM + EFClientConnectionHistory +""") +rows = existing_cur.fetchall() + +for row in rows: + client_id = row[1] + server_id = row[4] + + # Retrieve client name + existing_cur.execute(""" + SELECT + EFAlias.Name + FROM + EFClients + JOIN EFAlias ON EFClients.CurrentAliasId = EFAlias.AliasId + WHERE + EFClients.ClientId = ? + """, (client_id,)) + client_name = existing_cur.fetchone() + if client_name: + client_name = client_name[0].replace('^7', '') + else: + client_name = 'Unknown' + + # Retrieve server hostname + existing_cur.execute(""" + SELECT + EFServers.HostName + FROM + EFServers + WHERE + EFServers.ServerId = ? 
+ """, (server_id,)) + server_hostname = existing_cur.fetchone() + if server_hostname: + server_hostname = server_hostname[0] + else: + server_hostname = 'Unknown' + + # Map ConnectionType values to their corresponding text + connection_type_map = {0: "Connect", 1: "Disconnect"} + connection_type = connection_type_map[row[3]] + + # Insert the modified row into the new ClientConnectionHistory table + new_row = (row[0], client_name, row[2], connection_type, server_hostname) + new_cur.execute("INSERT INTO ClientConnectionHistory (ConnectionId, Client, ConnectionTime, ConnectionType, Server) VALUES (?, ?, ?, ?, ?)", new_row) + +# Commit the changes and close the connections +new_conn.commit() +existing_conn.close() new_conn.close() \ No newline at end of file diff --git a/Export ClientConnectionHistory/db_export_connectionhistory_json.py b/src/Export ClientConnectionHistory/db_export_connectionhistory_json.py similarity index 96% rename from Export ClientConnectionHistory/db_export_connectionhistory_json.py rename to src/Export ClientConnectionHistory/db_export_connectionhistory_json.py index a1915e7..55a48fd 100644 --- a/Export ClientConnectionHistory/db_export_connectionhistory_json.py +++ b/src/Export ClientConnectionHistory/db_export_connectionhistory_json.py @@ -1,32 +1,32 @@ -import sqlite3 -import json - -# Connect to the new database -new_conn = sqlite3.connect("Database.db") -new_cur = new_conn.cursor() - -# Fetch data from the ClientConnectionHistory table sorted by ConnectionTime in descending order -new_cur.execute(""" -SELECT ConnectionId, Client, ConnectionTime, ConnectionType, Server -FROM ClientConnectionHistory -ORDER BY ConnectionTime DESC -""") -client_connection_history = new_cur.fetchall() - -# Create a list of dictionaries representing the client connection history -client_connection_history_list = [] -for row in client_connection_history: - client_connection_history_list.append({ - "ConnectionId": row[0], - "Client": row[1], - "ConnectionTime": row[2], - "ConnectionType": row[3], - "Server": row[4] - }) - -# Write the client connection history to a JSON file -with open("ClientConnectionHistory.json", "w") as f: - json.dump(client_connection_history_list, f, indent=2) - -# Close the new database -new_conn.close() +import sqlite3 +import json + +# Connect to the new database +new_conn = sqlite3.connect("Database.db") +new_cur = new_conn.cursor() + +# Fetch data from the ClientConnectionHistory table sorted by ConnectionTime in descending order +new_cur.execute(""" +SELECT ConnectionId, Client, ConnectionTime, ConnectionType, Server +FROM ClientConnectionHistory +ORDER BY ConnectionTime DESC +""") +client_connection_history = new_cur.fetchall() + +# Create a list of dictionaries representing the client connection history +client_connection_history_list = [] +for row in client_connection_history: + client_connection_history_list.append({ + "ConnectionId": row[0], + "Client": row[1], + "ConnectionTime": row[2], + "ConnectionType": row[3], + "Server": row[4] + }) + +# Write the client connection history to a JSON file +with open("ClientConnectionHistory.json", "w") as f: + json.dump(client_connection_history_list, f, indent=2) + +# Close the new database +new_conn.close() diff --git a/Export ClientMessages/db_export_messages.py b/src/Export ClientMessages/db_export_messages.py similarity index 96% rename from Export ClientMessages/db_export_messages.py rename to src/Export ClientMessages/db_export_messages.py index 9f542ac..0590615 100644 --- a/Export 
ClientMessages/db_export_messages.py +++ b/src/Export ClientMessages/db_export_messages.py @@ -1,78 +1,78 @@ -import sqlite3 - -# Connect to the existing database -existing_conn = sqlite3.connect("Database.db") -existing_cur = existing_conn.cursor() - -# Connect to the new database -new_conn = sqlite3.connect("Plutonium_Servers.db") -new_cur = new_conn.cursor() - -# Create the modified ClientMessages table in the new database -new_cur.execute(""" -CREATE TABLE "ClientMessages" ( - "MessageId" INTEGER NOT NULL, - "Client" TEXT NOT NULL, - "Message" TEXT NOT NULL, - "TimeSent" TEXT NOT NULL, - "Server" TEXT, - CONSTRAINT "PK_ClientMessages" PRIMARY KEY("MessageId" AUTOINCREMENT) -) -""") - -# Fetch data from existing EFClientMessages -existing_cur.execute(""" -SELECT - EFClientMessages.MessageId, - EFClientMessages.ClientId, - EFClientMessages.Message, - EFClientMessages.TimeSent, - EFClientMessages.ServerId -FROM - EFClientMessages -""") -rows = existing_cur.fetchall() - -for row in rows: - client_id = row[1] - server_id = row[4] - - # Retrieve client name - existing_cur.execute(""" - SELECT - EFAlias.Name - FROM - EFClients - JOIN EFAlias ON EFClients.CurrentAliasId = EFAlias.AliasId - WHERE - EFClients.ClientId = ? - """, (client_id,)) - client_name = existing_cur.fetchone() - if client_name: - client_name = client_name[0].replace('^7', '') - else: - client_name = 'Unknown' - - # Retrieve server hostname - existing_cur.execute(""" - SELECT - EFServers.HostName - FROM - EFServers - WHERE - EFServers.ServerId = ? - """, (server_id,)) - server_hostname = existing_cur.fetchone() - if server_hostname: - server_hostname = server_hostname[0] - else: - server_hostname = 'Unknown' - - # Insert the modified row into the new ClientMessages table - new_row = (row[0], client_name, row[2], row[3], server_hostname) - new_cur.execute("INSERT INTO ClientMessages (MessageId, Client, Message, TimeSent, Server) VALUES (?, ?, ?, ?, ?)", new_row) - -# Commit the changes and close the connections -new_conn.commit() -existing_conn.close() +import sqlite3 + +# Connect to the existing database +existing_conn = sqlite3.connect("Database.db") +existing_cur = existing_conn.cursor() + +# Connect to the new database +new_conn = sqlite3.connect("Plutonium_Servers.db") +new_cur = new_conn.cursor() + +# Create the modified ClientMessages table in the new database +new_cur.execute(""" +CREATE TABLE "ClientMessages" ( + "MessageId" INTEGER NOT NULL, + "Client" TEXT NOT NULL, + "Message" TEXT NOT NULL, + "TimeSent" TEXT NOT NULL, + "Server" TEXT, + CONSTRAINT "PK_ClientMessages" PRIMARY KEY("MessageId" AUTOINCREMENT) +) +""") + +# Fetch data from existing EFClientMessages +existing_cur.execute(""" +SELECT + EFClientMessages.MessageId, + EFClientMessages.ClientId, + EFClientMessages.Message, + EFClientMessages.TimeSent, + EFClientMessages.ServerId +FROM + EFClientMessages +""") +rows = existing_cur.fetchall() + +for row in rows: + client_id = row[1] + server_id = row[4] + + # Retrieve client name + existing_cur.execute(""" + SELECT + EFAlias.Name + FROM + EFClients + JOIN EFAlias ON EFClients.CurrentAliasId = EFAlias.AliasId + WHERE + EFClients.ClientId = ? + """, (client_id,)) + client_name = existing_cur.fetchone() + if client_name: + client_name = client_name[0].replace('^7', '') + else: + client_name = 'Unknown' + + # Retrieve server hostname + existing_cur.execute(""" + SELECT + EFServers.HostName + FROM + EFServers + WHERE + EFServers.ServerId = ? 
+ """, (server_id,)) + server_hostname = existing_cur.fetchone() + if server_hostname: + server_hostname = server_hostname[0] + else: + server_hostname = 'Unknown' + + # Insert the modified row into the new ClientMessages table + new_row = (row[0], client_name, row[2], row[3], server_hostname) + new_cur.execute("INSERT INTO ClientMessages (MessageId, Client, Message, TimeSent, Server) VALUES (?, ?, ?, ?, ?)", new_row) + +# Commit the changes and close the connections +new_conn.commit() +existing_conn.close() new_conn.close() \ No newline at end of file diff --git a/Export ClientMessages/db_export_messages_json.py b/src/Export ClientMessages/db_export_messages_json.py similarity index 96% rename from Export ClientMessages/db_export_messages_json.py rename to src/Export ClientMessages/db_export_messages_json.py index ec199c7..1bfa9fd 100644 --- a/Export ClientMessages/db_export_messages_json.py +++ b/src/Export ClientMessages/db_export_messages_json.py @@ -1,32 +1,32 @@ -import sqlite3 -import json - -# Connect to the new database -new_conn = sqlite3.connect("Database.db") -new_cur = new_conn.cursor() - -# Fetch data from the ClientMessages table -new_cur.execute(""" -SELECT MessageId, Client, Message, TimeSent, Server -FROM ClientMessages -ORDER BY TimeSent DESC -""") -client_messages = new_cur.fetchall() - -# Create a list of dictionaries representing the client messages -client_messages_list = [] -for row in client_messages: - client_messages_list.append({ - "MessageId": row[0], - "Client": row[1], - "Message": row[2], - "TimeSent": row[3], - "Server": row[4] - }) - -# Write the client messages to a JSON file -with open("ClientMessages.json", "w") as f: - json.dump(client_messages_list, f, indent=2) - -# Close the new database -new_conn.close() +import sqlite3 +import json + +# Connect to the new database +new_conn = sqlite3.connect("Database.db") +new_cur = new_conn.cursor() + +# Fetch data from the ClientMessages table +new_cur.execute(""" +SELECT MessageId, Client, Message, TimeSent, Server +FROM ClientMessages +ORDER BY TimeSent DESC +""") +client_messages = new_cur.fetchall() + +# Create a list of dictionaries representing the client messages +client_messages_list = [] +for row in client_messages: + client_messages_list.append({ + "MessageId": row[0], + "Client": row[1], + "Message": row[2], + "TimeSent": row[3], + "Server": row[4] + }) + +# Write the client messages to a JSON file +with open("ClientMessages.json", "w") as f: + json.dump(client_messages_list, f, indent=2) + +# Close the new database +new_conn.close() diff --git a/Export EFAlias/db_export_ip.py b/src/Export EFAlias/db_export_ip.py similarity index 96% rename from Export EFAlias/db_export_ip.py rename to src/Export EFAlias/db_export_ip.py index 4ac6787..eb80e45 100644 --- a/Export EFAlias/db_export_ip.py +++ b/src/Export EFAlias/db_export_ip.py @@ -1,46 +1,46 @@ -import sqlite3 - -# Connect to the existing database -conn = sqlite3.connect("Database.db") -cur = conn.cursor() - -# Connect to the new database -new_conn = sqlite3.connect("Plutonium_Servers.db") -new_cur = new_conn.cursor() - -def fetch_client_info(src_cur): - src_cur.execute(""" - SELECT Name, SearchableIPAddress, DateAdded FROM EFAlias - """) - client_info = [] - for row in src_cur.fetchall(): - name = row[0].replace('^7', '') # Remove '^7' from the Name column - client_info.append((name, row[1], row[2])) - - return client_info - -# Fetch client info from EFAlias table in the existing database -client_info = fetch_client_info(cur) - -# Create the new 
table -new_cur.execute(""" -CREATE TABLE IF NOT EXISTS "IPAddresses" ( - Name TEXT NOT NULL, - SearchableIPAddress TEXT, - DateAdded TEXT NOT NULL -) -""") - -# Insert the fetched data into the new table -new_cur.executemany(""" -INSERT INTO "IPAddresses" ( - Name, SearchableIPAddress, DateAdded -) VALUES (?, ?, ?) -""", client_info) - -# Commit and close the new database -new_conn.commit() -new_conn.close() - -# Close the existing database +import sqlite3 + +# Connect to the existing database +conn = sqlite3.connect("Database.db") +cur = conn.cursor() + +# Connect to the new database +new_conn = sqlite3.connect("Plutonium_Servers.db") +new_cur = new_conn.cursor() + +def fetch_client_info(src_cur): + src_cur.execute(""" + SELECT Name, SearchableIPAddress, DateAdded FROM EFAlias + """) + client_info = [] + for row in src_cur.fetchall(): + name = row[0].replace('^7', '') # Remove '^7' from the Name column + client_info.append((name, row[1], row[2])) + + return client_info + +# Fetch client info from EFAlias table in the existing database +client_info = fetch_client_info(cur) + +# Create the new table +new_cur.execute(""" +CREATE TABLE IF NOT EXISTS "IPAddresses" ( + Name TEXT NOT NULL, + SearchableIPAddress TEXT, + DateAdded TEXT NOT NULL +) +""") + +# Insert the fetched data into the new table +new_cur.executemany(""" +INSERT INTO "IPAddresses" ( + Name, SearchableIPAddress, DateAdded +) VALUES (?, ?, ?) +""", client_info) + +# Commit and close the new database +new_conn.commit() +new_conn.close() + +# Close the existing database conn.close() \ No newline at end of file diff --git a/Export EFAlias/db_export_ip_json.py b/src/Export EFAlias/db_export_ip_json.py similarity index 96% rename from Export EFAlias/db_export_ip_json.py rename to src/Export EFAlias/db_export_ip_json.py index 1682c74..70ebcf2 100644 --- a/Export EFAlias/db_export_ip_json.py +++ b/src/Export EFAlias/db_export_ip_json.py @@ -1,29 +1,29 @@ -import sqlite3 -import json - -# Connect to the new database -new_conn = sqlite3.connect("Database.db") -new_cur = new_conn.cursor() - -# Fetch data from the ClientInfo table sorted by DateAdded in descending order -new_cur.execute(""" -SELECT Name, SearchableIPAddress, DateAdded FROM "IPAddresses" -ORDER BY DateAdded DESC -""") -client_info = new_cur.fetchall() - -# Create a list of dictionaries representing the client info -client_info_list = [] -for row in client_info: - client_info_list.append({ - "Name": row[0], - "SearchableIPAddress": row[1], - "DateAdded": row[2] - }) - -# Write the client info to a JSON file -with open("IPAddresses.json", "w") as f: - json.dump(client_info_list, f, indent=2) - -# Close the new database +import sqlite3 +import json + +# Connect to the new database +new_conn = sqlite3.connect("Database.db") +new_cur = new_conn.cursor() + +# Fetch data from the ClientInfo table sorted by DateAdded in descending order +new_cur.execute(""" +SELECT Name, SearchableIPAddress, DateAdded FROM "IPAddresses" +ORDER BY DateAdded DESC +""") +client_info = new_cur.fetchall() + +# Create a list of dictionaries representing the client info +client_info_list = [] +for row in client_info: + client_info_list.append({ + "Name": row[0], + "SearchableIPAddress": row[1], + "DateAdded": row[2] + }) + +# Write the client info to a JSON file +with open("IPAddresses.json", "w") as f: + json.dump(client_info_list, f, indent=2) + +# Close the new database new_conn.close() \ No newline at end of file diff --git a/Export EFChangeHistory/db_export_auditlog.py b/src/Export 
EFChangeHistory/db_export_auditlog.py similarity index 97% rename from Export EFChangeHistory/db_export_auditlog.py rename to src/Export EFChangeHistory/db_export_auditlog.py index 1b9b45e..2ad7184 100644 --- a/Export EFChangeHistory/db_export_auditlog.py +++ b/src/Export EFChangeHistory/db_export_auditlog.py @@ -1,93 +1,93 @@ -import sqlite3 - -# Connect to the existing database -existing_conn = sqlite3.connect("Database.db") -existing_cur = existing_conn.cursor() - -# Connect to the new database -new_conn = sqlite3.connect("Plutonium_Servers.db") -new_cur = new_conn.cursor() - -# Create the modified AuditLog table in the new database -new_cur.execute(""" -CREATE TABLE "AuditLog" ( - "ChangeHistoryId" INTEGER NOT NULL, - "TypeOfChange" TEXT NOT NULL, - "Time" TEXT NOT NULL, - "Data" TEXT, - "Command" TEXT, - "Origin" TEXT, - "Target" TEXT, - CONSTRAINT "PK_AuditLog" PRIMARY KEY("ChangeHistoryId" AUTOINCREMENT) -) -""") - -# Fetch data from existing EFChangeHistory, EFClients, and EFAlias tables -existing_cur.execute(""" -SELECT - EFChangeHistory.ChangeHistoryId, - EFChangeHistory.TypeOfChange, - EFChangeHistory.TimeChanged, - EFChangeHistory.Comment, - EFChangeHistory.CurrentValue, - EFChangeHistory.OriginEntityId, - EFChangeHistory.TargetEntityId -FROM - EFChangeHistory -""") -rows = existing_cur.fetchall() - -# Prepare a dictionary to store ClientId to Name mapping -client_name_map = {} - -for row in rows: - origin_entity_id = row[5] - target_entity_id = row[6] - - if origin_entity_id not in client_name_map: - existing_cur.execute(""" - SELECT - EFAlias.Name - FROM - EFClients - JOIN EFAlias ON EFClients.CurrentAliasId = EFAlias.AliasId - WHERE - EFClients.ClientId = ? - """, (origin_entity_id,)) - origin_name = existing_cur.fetchone() - if origin_name: - client_name_map[origin_entity_id] = origin_name[0].replace('^7', '') - else: - client_name_map[origin_entity_id] = 'Unknown' - - if target_entity_id not in client_name_map: - if target_entity_id == 0: - client_name_map[target_entity_id] = None - else: - existing_cur.execute(""" - SELECT - EFAlias.Name - FROM - EFClients - JOIN EFAlias ON EFClients.CurrentAliasId = EFAlias.AliasId - WHERE - EFClients.ClientId = ? 
- """, (target_entity_id,)) - target_name = existing_cur.fetchone() - if target_name: - client_name_map[target_entity_id] = target_name[0].replace('^7', '') - else: - client_name_map[target_entity_id] = 'Unknown' - - # Map TypeOfChange values to their corresponding text - type_of_change_map = {0: "Console", 1: "Punishment", 2: "Client"} - type_of_change = type_of_change_map[row[1]] - - # Insert the modified row into the new AuditLog table - new_row = (row[0], type_of_change, row[2], row[3], row[4], client_name_map[origin_entity_id], client_name_map[target_entity_id]) - new_cur.execute("INSERT INTO \"AuditLog\" (ChangeHistoryId, TypeOfChange, Time, Data, Command, Origin, Target) VALUES (?, ?, ?, ?, ?, ?, ?)", new_row) - -# Commit the changes and close the connections -new_conn.commit() -existing_conn.close() +import sqlite3 + +# Connect to the existing database +existing_conn = sqlite3.connect("Database.db") +existing_cur = existing_conn.cursor() + +# Connect to the new database +new_conn = sqlite3.connect("Plutonium_Servers.db") +new_cur = new_conn.cursor() + +# Create the modified AuditLog table in the new database +new_cur.execute(""" +CREATE TABLE "AuditLog" ( + "ChangeHistoryId" INTEGER NOT NULL, + "TypeOfChange" TEXT NOT NULL, + "Time" TEXT NOT NULL, + "Data" TEXT, + "Command" TEXT, + "Origin" TEXT, + "Target" TEXT, + CONSTRAINT "PK_AuditLog" PRIMARY KEY("ChangeHistoryId" AUTOINCREMENT) +) +""") + +# Fetch data from existing EFChangeHistory, EFClients, and EFAlias tables +existing_cur.execute(""" +SELECT + EFChangeHistory.ChangeHistoryId, + EFChangeHistory.TypeOfChange, + EFChangeHistory.TimeChanged, + EFChangeHistory.Comment, + EFChangeHistory.CurrentValue, + EFChangeHistory.OriginEntityId, + EFChangeHistory.TargetEntityId +FROM + EFChangeHistory +""") +rows = existing_cur.fetchall() + +# Prepare a dictionary to store ClientId to Name mapping +client_name_map = {} + +for row in rows: + origin_entity_id = row[5] + target_entity_id = row[6] + + if origin_entity_id not in client_name_map: + existing_cur.execute(""" + SELECT + EFAlias.Name + FROM + EFClients + JOIN EFAlias ON EFClients.CurrentAliasId = EFAlias.AliasId + WHERE + EFClients.ClientId = ? + """, (origin_entity_id,)) + origin_name = existing_cur.fetchone() + if origin_name: + client_name_map[origin_entity_id] = origin_name[0].replace('^7', '') + else: + client_name_map[origin_entity_id] = 'Unknown' + + if target_entity_id not in client_name_map: + if target_entity_id == 0: + client_name_map[target_entity_id] = None + else: + existing_cur.execute(""" + SELECT + EFAlias.Name + FROM + EFClients + JOIN EFAlias ON EFClients.CurrentAliasId = EFAlias.AliasId + WHERE + EFClients.ClientId = ? 
+ """, (target_entity_id,)) + target_name = existing_cur.fetchone() + if target_name: + client_name_map[target_entity_id] = target_name[0].replace('^7', '') + else: + client_name_map[target_entity_id] = 'Unknown' + + # Map TypeOfChange values to their corresponding text + type_of_change_map = {0: "Console", 1: "Punishment", 2: "Client"} + type_of_change = type_of_change_map[row[1]] + + # Insert the modified row into the new AuditLog table + new_row = (row[0], type_of_change, row[2], row[3], row[4], client_name_map[origin_entity_id], client_name_map[target_entity_id]) + new_cur.execute("INSERT INTO \"AuditLog\" (ChangeHistoryId, TypeOfChange, Time, Data, Command, Origin, Target) VALUES (?, ?, ?, ?, ?, ?, ?)", new_row) + +# Commit the changes and close the connections +new_conn.commit() +existing_conn.close() new_conn.close() \ No newline at end of file diff --git a/Export EFChangeHistory/db_export_auditlog_json.py b/src/Export EFChangeHistory/db_export_auditlog_json.py similarity index 96% rename from Export EFChangeHistory/db_export_auditlog_json.py rename to src/Export EFChangeHistory/db_export_auditlog_json.py index 909fde0..5049400 100644 --- a/Export EFChangeHistory/db_export_auditlog_json.py +++ b/src/Export EFChangeHistory/db_export_auditlog_json.py @@ -1,33 +1,33 @@ -import sqlite3 -import json - -# Connect to the new database -new_conn = sqlite3.connect("Database.db") -new_cur = new_conn.cursor() - -# Fetch data from the EFChangeHistory table sorted by Time in descending order -new_cur.execute(""" -SELECT ChangeHistoryId, TypeOfChange, Time, Data, Command, Origin, Target FROM AuditLog -ORDER BY Time DESC -""") -ef_change_history = new_cur.fetchall() - -# Create a list of dictionaries representing the EFChangeHistory data -ef_change_history_list = [] -for row in ef_change_history: - ef_change_history_list.append({ - "ChangeHistoryId": row[0], - "TypeOfChange": row[1], - "Time": row[2], - "Data": row[3], - "Command": row[4], - "Origin": row[5], - "Target": row[6] - }) - -# Write the EFChangeHistory data to a JSON file -with open("AuditLog.json", "w") as f: - json.dump(ef_change_history_list, f, indent=2) - -# Close the new database -new_conn.close() +import sqlite3 +import json + +# Connect to the new database +new_conn = sqlite3.connect("Database.db") +new_cur = new_conn.cursor() + +# Fetch data from the EFChangeHistory table sorted by Time in descending order +new_cur.execute(""" +SELECT ChangeHistoryId, TypeOfChange, Time, Data, Command, Origin, Target FROM AuditLog +ORDER BY Time DESC +""") +ef_change_history = new_cur.fetchall() + +# Create a list of dictionaries representing the EFChangeHistory data +ef_change_history_list = [] +for row in ef_change_history: + ef_change_history_list.append({ + "ChangeHistoryId": row[0], + "TypeOfChange": row[1], + "Time": row[2], + "Data": row[3], + "Command": row[4], + "Origin": row[5], + "Target": row[6] + }) + +# Write the EFChangeHistory data to a JSON file +with open("AuditLog.json", "w") as f: + json.dump(ef_change_history_list, f, indent=2) + +# Close the new database +new_conn.close() diff --git a/Export EFClients/db_export_clients.py b/src/Export EFClients/db_export_clients.py similarity index 96% rename from Export EFClients/db_export_clients.py rename to src/Export EFClients/db_export_clients.py index a509637..b599fd6 100644 --- a/Export EFClients/db_export_clients.py +++ b/src/Export EFClients/db_export_clients.py @@ -1,86 +1,86 @@ -# EFClients - -import sqlite3 - -# Connect to the existing database -existing_conn = 
sqlite3.connect("Database.db") -existing_cur = existing_conn.cursor() - -# Connect to the new database -new_conn = sqlite3.connect("Plutonium_Servers.db") -new_cur = new_conn.cursor() - -new_cur.execute(""" -CREATE TABLE "Clients" ( - "Connections" INTEGER NOT NULL, - "Name" TEXT NOT NULL, - "FirstConnection" TEXT NOT NULL, - "Game" TEXT NOT NULL, - "LastConnection" TEXT NOT NULL, - "Level" TEXT NOT NULL, - "Masked" INTEGER NOT NULL, - "TotalConnectionTime" INTEGER NOT NULL, - "IP" TEXT -) -""") - -existing_cur.execute(""" -SELECT - EFClients.Connections, - EFClients.CurrentAliasId, - EFClients.FirstConnection, - EFClients.GameName, - EFClients.LastConnection, - EFClients.Level, - EFClients.Masked, - EFClients.TotalConnectionTime, - EFAlias.SearchableIPAddress -FROM - EFClients -JOIN - EFAlias ON EFClients.CurrentAliasId = EFAlias.AliasId -""") -rows = existing_cur.fetchall() - -for row in rows: - connections = row[0] - current_alias_id = row[1] - first_connection = row[2] - game_name = row[3] - last_connection = row[4] - level = row[5] - masked = row[6] - total_connection_time = row[7] - ip_address = row[8] - - # Retrieve client name - existing_cur.execute(""" - SELECT - EFAlias.Name - FROM - EFAlias - WHERE - EFAlias.AliasId = ? - """, (current_alias_id,)) - client_name = existing_cur.fetchone() - if client_name: - client_name = client_name[0].replace('^7', '') - else: - client_name = 'Unknown' - - # Map Level values to their corresponding text - level_map = {-1: "Banned", 0: "User", 1: "Trusted", 2: "Moderator", 3: "Administrator", 4: "Senior Administrator", 5: "Owner", 6: "Creator", 7: "Console"} - level = level_map.get(level, f"Unknown Level ({level})") - - # Map GameName values to their corresponding text - game_map = {5: "WaW", 6: "BO", 7: "BO2", 3: "MW3"} - game = game_map.get(game_name, f"Unknown Game ({game_name})") - - # Insert the modified row into the new Clients table - new_row = (connections, client_name, first_connection, game, last_connection, level, masked, total_connection_time, ip_address) - new_cur.execute("INSERT INTO Clients (Connections, Name, FirstConnection, Game, LastConnection, Level, Masked, TotalConnectionTime, IP) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", new_row) - -# Commit the changes and close the connections -new_conn.commit() -existing_conn.close() +# EFClients + +import sqlite3 + +# Connect to the existing database +existing_conn = sqlite3.connect("Database.db") +existing_cur = existing_conn.cursor() + +# Connect to the new database +new_conn = sqlite3.connect("Plutonium_Servers.db") +new_cur = new_conn.cursor() + +new_cur.execute(""" +CREATE TABLE "Clients" ( + "Connections" INTEGER NOT NULL, + "Name" TEXT NOT NULL, + "FirstConnection" TEXT NOT NULL, + "Game" TEXT NOT NULL, + "LastConnection" TEXT NOT NULL, + "Level" TEXT NOT NULL, + "Masked" INTEGER NOT NULL, + "TotalConnectionTime" INTEGER NOT NULL, + "IP" TEXT +) +""") + +existing_cur.execute(""" +SELECT + EFClients.Connections, + EFClients.CurrentAliasId, + EFClients.FirstConnection, + EFClients.GameName, + EFClients.LastConnection, + EFClients.Level, + EFClients.Masked, + EFClients.TotalConnectionTime, + EFAlias.SearchableIPAddress +FROM + EFClients +JOIN + EFAlias ON EFClients.CurrentAliasId = EFAlias.AliasId +""") +rows = existing_cur.fetchall() + +for row in rows: + connections = row[0] + current_alias_id = row[1] + first_connection = row[2] + game_name = row[3] + last_connection = row[4] + level = row[5] + masked = row[6] + total_connection_time = row[7] + ip_address = row[8] + + # Retrieve client 
name + existing_cur.execute(""" + SELECT + EFAlias.Name + FROM + EFAlias + WHERE + EFAlias.AliasId = ? + """, (current_alias_id,)) + client_name = existing_cur.fetchone() + if client_name: + client_name = client_name[0].replace('^7', '') + else: + client_name = 'Unknown' + + # Map Level values to their corresponding text + level_map = {-1: "Banned", 0: "User", 1: "Trusted", 2: "Moderator", 3: "Administrator", 4: "Senior Administrator", 5: "Owner", 6: "Creator", 7: "Console"} + level = level_map.get(level, f"Unknown Level ({level})") + + # Map GameName values to their corresponding text + game_map = {5: "WaW", 6: "BO", 7: "BO2", 3: "MW3"} + game = game_map.get(game_name, f"Unknown Game ({game_name})") + + # Insert the modified row into the new Clients table + new_row = (connections, client_name, first_connection, game, last_connection, level, masked, total_connection_time, ip_address) + new_cur.execute("INSERT INTO Clients (Connections, Name, FirstConnection, Game, LastConnection, Level, Masked, TotalConnectionTime, IP) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", new_row) + +# Commit the changes and close the connections +new_conn.commit() +existing_conn.close() new_conn.close() \ No newline at end of file diff --git a/Export EFClients/db_export_clients_json.py b/src/Export EFClients/db_export_clients_json.py similarity index 96% rename from Export EFClients/db_export_clients_json.py rename to src/Export EFClients/db_export_clients_json.py index 6a8e8f0..1b82d53 100644 --- a/Export EFClients/db_export_clients_json.py +++ b/src/Export EFClients/db_export_clients_json.py @@ -1,35 +1,35 @@ -import sqlite3 -import json - -# Connect to the new database -new_conn = sqlite3.connect("Database.db") -new_cur = new_conn.cursor() - -# Fetch data from the EFClients table -new_cur.execute(""" -SELECT Connections, Name, FirstConnection, Game, LastConnection, Level, Masked, TotalConnectionTime -FROM Clients -ORDER BY LastConnection DESC -""") -clients = new_cur.fetchall() - -# Create a list of dictionaries representing the clients -clients_list = [] -for row in clients: - clients_list.append({ - "Connections": row[0], - "Name": row[1], - "FirstConnection": row[2], - "Game": row[3], - "LastConnection": row[4], - "Level": row[5], - "Masked": row[6], - "TotalConnectionTime": row[7] - }) - -# Write the clients to a JSON file -with open("Clients.json", "w") as f: - json.dump(clients_list, f, indent=2) - -# Close the new database +import sqlite3 +import json + +# Connect to the new database +new_conn = sqlite3.connect("Database.db") +new_cur = new_conn.cursor() + +# Fetch data from the EFClients table +new_cur.execute(""" +SELECT Connections, Name, FirstConnection, Game, LastConnection, Level, Masked, TotalConnectionTime +FROM Clients +ORDER BY LastConnection DESC +""") +clients = new_cur.fetchall() + +# Create a list of dictionaries representing the clients +clients_list = [] +for row in clients: + clients_list.append({ + "Connections": row[0], + "Name": row[1], + "FirstConnection": row[2], + "Game": row[3], + "LastConnection": row[4], + "Level": row[5], + "Masked": row[6], + "TotalConnectionTime": row[7] + }) + +# Write the clients to a JSON file +with open("Clients.json", "w") as f: + json.dump(clients_list, f, indent=2) + +# Close the new database new_conn.close() \ No newline at end of file diff --git a/Export EFMaps/db_export_maps.py b/src/Export EFMaps/db_export_maps.py similarity index 96% rename from Export EFMaps/db_export_maps.py rename to src/Export EFMaps/db_export_maps.py index 480f220..ec810ae 100644 --- 
a/Export EFMaps/db_export_maps.py +++ b/src/Export EFMaps/db_export_maps.py @@ -1,50 +1,50 @@ -# EFMaps - -import sqlite3 - -# Connect to the existing database -existing_conn = sqlite3.connect("Database.db") -existing_cur = existing_conn.cursor() - -# Connect to the new database -new_conn = sqlite3.connect("Plutonium_Servers.db") -new_cur = new_conn.cursor() - -# Create the Maps table in the new_database.db -new_cur.execute(""" -CREATE TABLE IF NOT EXISTS "Maps" ( - "MapId" INTEGER NOT NULL, - "CreatedDateTime" TEXT NOT NULL, - "Name" TEXT NOT NULL, - "Game" TEXT NOT NULL, - CONSTRAINT "PK_Maps" PRIMARY KEY("MapId" AUTOINCREMENT) -) -""") - -# Fetch data from the existing EFMaps table -existing_cur.execute(""" -SELECT - MapId, CreatedDateTime, Name, Game -FROM - EFMaps -""") -rows = existing_cur.fetchall() - -# Modify the data according to the requirements -modified_rows = [] -for row in rows: - game_map = {5: "WaW", 6: "BO", 7: "BO2", 3: "MW3"} - game = game_map.get(row[3], f"Unknown Game ({row[3]})") - modified_rows.append((row[0], row[1], row[2], game)) - -# Insert the modified data into the Maps table in the new_database.db -new_cur.executemany(""" -INSERT INTO "Maps" ( - MapId, CreatedDateTime, Name, Game -) VALUES (?, ?, ?, ?) -""", modified_rows) - -# Commit the changes and close the connections -new_conn.commit() -existing_conn.close() +# EFMaps + +import sqlite3 + +# Connect to the existing database +existing_conn = sqlite3.connect("Database.db") +existing_cur = existing_conn.cursor() + +# Connect to the new database +new_conn = sqlite3.connect("Plutonium_Servers.db") +new_cur = new_conn.cursor() + +# Create the Maps table in the new_database.db +new_cur.execute(""" +CREATE TABLE IF NOT EXISTS "Maps" ( + "MapId" INTEGER NOT NULL, + "CreatedDateTime" TEXT NOT NULL, + "Name" TEXT NOT NULL, + "Game" TEXT NOT NULL, + CONSTRAINT "PK_Maps" PRIMARY KEY("MapId" AUTOINCREMENT) +) +""") + +# Fetch data from the existing EFMaps table +existing_cur.execute(""" +SELECT + MapId, CreatedDateTime, Name, Game +FROM + EFMaps +""") +rows = existing_cur.fetchall() + +# Modify the data according to the requirements +modified_rows = [] +for row in rows: + game_map = {5: "WaW", 6: "BO", 7: "BO2", 3: "MW3"} + game = game_map.get(row[3], f"Unknown Game ({row[3]})") + modified_rows.append((row[0], row[1], row[2], game)) + +# Insert the modified data into the Maps table in the new_database.db +new_cur.executemany(""" +INSERT INTO "Maps" ( + MapId, CreatedDateTime, Name, Game +) VALUES (?, ?, ?, ?) 
+""", modified_rows) + +# Commit the changes and close the connections +new_conn.commit() +existing_conn.close() new_conn.close() \ No newline at end of file diff --git a/Export EFMaps/db_export_maps_json.py b/src/Export EFMaps/db_export_maps_json.py similarity index 95% rename from Export EFMaps/db_export_maps_json.py rename to src/Export EFMaps/db_export_maps_json.py index 068f19c..df3775a 100644 --- a/Export EFMaps/db_export_maps_json.py +++ b/src/Export EFMaps/db_export_maps_json.py @@ -1,35 +1,35 @@ -import sqlite3 -import json - -# Connect to the new_database.db -new_conn = sqlite3.connect("Database.db") -new_cur = new_conn.cursor() - -# Fetch data from the Maps table sorted by MapId DESC -new_cur.execute(""" -SELECT - MapId, CreatedDateTime, Name, Game -FROM - Maps -ORDER BY - MapId DESC -""") -rows = new_cur.fetchall() - -# Convert fetched data into a list of dictionaries -maps_list = [] -for row in rows: - map_dict = { - "MapId": row[0], - "CreatedDateTime": row[1], - "Name": row[2], - "Game": row[3] - } - maps_list.append(map_dict) - -# Write the list of dictionaries to a JSON file -with open("maps_export.json", "w") as json_file: - json.dump(maps_list, json_file, indent=4) - -# Close the connection +import sqlite3 +import json + +# Connect to the new_database.db +new_conn = sqlite3.connect("Database.db") +new_cur = new_conn.cursor() + +# Fetch data from the Maps table sorted by MapId DESC +new_cur.execute(""" +SELECT + MapId, CreatedDateTime, Name, Game +FROM + Maps +ORDER BY + MapId DESC +""") +rows = new_cur.fetchall() + +# Convert fetched data into a list of dictionaries +maps_list = [] +for row in rows: + map_dict = { + "MapId": row[0], + "CreatedDateTime": row[1], + "Name": row[2], + "Game": row[3] + } + maps_list.append(map_dict) + +# Write the list of dictionaries to a JSON file +with open("maps_export.json", "w") as json_file: + json.dump(maps_list, json_file, indent=4) + +# Close the connection new_conn.close() \ No newline at end of file diff --git a/Export EFMeta/db_export_metadata.py b/src/Export EFMeta/db_export_metadata.py similarity index 96% rename from Export EFMeta/db_export_metadata.py rename to src/Export EFMeta/db_export_metadata.py index 3eeaa3a..503cb25 100644 --- a/Export EFMeta/db_export_metadata.py +++ b/src/Export EFMeta/db_export_metadata.py @@ -1,84 +1,84 @@ -# EFMeta - -import sqlite3 - -# Connect to the existing database -existing_conn = sqlite3.connect("Database.db") -existing_cur = existing_conn.cursor() - -# Connect to the new database -new_conn = sqlite3.connect("Plutonium_Servers.db") -new_cur = new_conn.cursor() - -# Create the modified Metadata table in the new database -new_cur.execute(""" -CREATE TABLE "Metadata" ( - "MetaId" INTEGER NOT NULL, - "Name" TEXT NOT NULL, - "Timestamp" TEXT NOT NULL, - "Note" TEXT NOT NULL, - "Value" TEXT NOT NULL -) -""") - -# Fetch data from existing EFMeta -existing_cur.execute(""" -SELECT - EFMeta.MetaId, - EFMeta.ClientId, - EFMeta.Created, - EFMeta.Key, - EFMeta.Value -FROM - EFMeta -""") -rows = existing_cur.fetchall() - -for row in rows: - meta_id = row[0] - client_id = row[1] - created = row[2] - key = row[3] - value = row[4] - - # Retrieve CurrentAliasId for the ClientId - existing_cur.execute(""" - SELECT - EFClients.CurrentAliasId - FROM - EFClients - WHERE - EFClients.ClientId = ? 
- """, (client_id,)) - current_alias_id = existing_cur.fetchone() - if current_alias_id: - current_alias_id = current_alias_id[0] - else: - current_alias_id = None - - # Retrieve client name - if current_alias_id: - existing_cur.execute(""" - SELECT - EFAlias.Name - FROM - EFAlias - WHERE - EFAlias.AliasId = ? - """, (current_alias_id,)) - client_name = existing_cur.fetchone() - if client_name: - client_name = client_name[0].replace('^7', '') - else: - client_name = 'Unknown' - else: - client_name = 'Unknown' - - # Insert the modified row into the new Metadata table - new_row = (meta_id, client_name, created, key, value) - new_cur.execute("INSERT INTO Metadata (MetaId, Name, Timestamp, Note, Value) VALUES (?, ?, ?, ?, ?)", new_row) - -# Commit the changes and close the connections -new_conn.commit() -existing_conn.close() +# EFMeta + +import sqlite3 + +# Connect to the existing database +existing_conn = sqlite3.connect("Database.db") +existing_cur = existing_conn.cursor() + +# Connect to the new database +new_conn = sqlite3.connect("Plutonium_Servers.db") +new_cur = new_conn.cursor() + +# Create the modified Metadata table in the new database +new_cur.execute(""" +CREATE TABLE "Metadata" ( + "MetaId" INTEGER NOT NULL, + "Name" TEXT NOT NULL, + "Timestamp" TEXT NOT NULL, + "Note" TEXT NOT NULL, + "Value" TEXT NOT NULL +) +""") + +# Fetch data from existing EFMeta +existing_cur.execute(""" +SELECT + EFMeta.MetaId, + EFMeta.ClientId, + EFMeta.Created, + EFMeta.Key, + EFMeta.Value +FROM + EFMeta +""") +rows = existing_cur.fetchall() + +for row in rows: + meta_id = row[0] + client_id = row[1] + created = row[2] + key = row[3] + value = row[4] + + # Retrieve CurrentAliasId for the ClientId + existing_cur.execute(""" + SELECT + EFClients.CurrentAliasId + FROM + EFClients + WHERE + EFClients.ClientId = ? + """, (client_id,)) + current_alias_id = existing_cur.fetchone() + if current_alias_id: + current_alias_id = current_alias_id[0] + else: + current_alias_id = None + + # Retrieve client name + if current_alias_id: + existing_cur.execute(""" + SELECT + EFAlias.Name + FROM + EFAlias + WHERE + EFAlias.AliasId = ? 
+ """, (current_alias_id,)) + client_name = existing_cur.fetchone() + if client_name: + client_name = client_name[0].replace('^7', '') + else: + client_name = 'Unknown' + else: + client_name = 'Unknown' + + # Insert the modified row into the new Metadata table + new_row = (meta_id, client_name, created, key, value) + new_cur.execute("INSERT INTO Metadata (MetaId, Name, Timestamp, Note, Value) VALUES (?, ?, ?, ?, ?)", new_row) + +# Commit the changes and close the connections +new_conn.commit() +existing_conn.close() new_conn.close() \ No newline at end of file diff --git a/Export EFMeta/db_export_metadata_json.py b/src/Export EFMeta/db_export_metadata_json.py similarity index 95% rename from Export EFMeta/db_export_metadata_json.py rename to src/Export EFMeta/db_export_metadata_json.py index b03b064..005949f 100644 --- a/Export EFMeta/db_export_metadata_json.py +++ b/src/Export EFMeta/db_export_metadata_json.py @@ -1,32 +1,32 @@ -import sqlite3 -import json - -# Connect to the new database -new_conn = sqlite3.connect("Database.db") -new_cur = new_conn.cursor() - -# Fetch data from the Metadata table -new_cur.execute(""" -SELECT MetaId, Name, Timestamp, Note, Value -FROM Metadata -ORDER BY Timestamp DESC -""") -metadata = new_cur.fetchall() - -# Create a list of dictionaries representing the metadata -metadata_list = [] -for row in metadata: - metadata_list.append({ - "MetaId": row[0], - "Name": row[1], - "Timestamp": row[2], - "Note": row[3], - "Value": row[4] - }) - -# Write the metadata to a JSON file -with open("Metadata.json", "w") as f: - json.dump(metadata_list, f, indent=2) - -# Close the new database +import sqlite3 +import json + +# Connect to the new database +new_conn = sqlite3.connect("Database.db") +new_cur = new_conn.cursor() + +# Fetch data from the Metadata table +new_cur.execute(""" +SELECT MetaId, Name, Timestamp, Note, Value +FROM Metadata +ORDER BY Timestamp DESC +""") +metadata = new_cur.fetchall() + +# Create a list of dictionaries representing the metadata +metadata_list = [] +for row in metadata: + metadata_list.append({ + "MetaId": row[0], + "Name": row[1], + "Timestamp": row[2], + "Note": row[3], + "Value": row[4] + }) + +# Write the metadata to a JSON file +with open("Metadata.json", "w") as f: + json.dump(metadata_list, f, indent=2) + +# Close the new database new_conn.close() \ No newline at end of file diff --git a/Export EFPenalties/db_export_penalties.py b/src/Export EFPenalties/db_export_penalties.py similarity index 96% rename from Export EFPenalties/db_export_penalties.py rename to src/Export EFPenalties/db_export_penalties.py index b9b7764..698113a 100644 --- a/Export EFPenalties/db_export_penalties.py +++ b/src/Export EFPenalties/db_export_penalties.py @@ -1,133 +1,133 @@ -import sqlite3 -import re - -# Connect to the existing database -existing_conn = sqlite3.connect("Database.db") -existing_cur = existing_conn.cursor() - -# Connect to the new database -new_conn = sqlite3.connect("Plutonium_Servers.db") -new_cur = new_conn.cursor() - -# Create the modified Penalties table in the new database -new_cur.execute(""" -CREATE TABLE "Penalties" ( - "PenaltyId" INTEGER NOT NULL, - "AutomatedOffense" TEXT NOT NULL, - "Expires" INTEGER, -- This line is modified to allow NULL values - "EvadedOffense" TEXT NOT NULL, - "Offender" TEXT NOT NULL, - "Offense" TEXT NOT NULL, - "Punisher" TEXT NOT NULL, - "Type" TEXT NOT NULL, - "Timestamp" INTEGER NOT NULL -) -""") - -# Fetch data from existing EFPenalties -existing_cur.execute(""" -SELECT - EFPenalties.PenaltyId, - 
EFPenalties.AutomatedOffense, - EFPenalties.Expires, - EFPenalties.IsEvadedOffense, - EFPenalties.OffenderId, - EFPenalties.Offense, - EFPenalties.PunisherId, - EFPenalties.Type, - EFPenalties."When" -FROM - EFPenalties -""") -rows = existing_cur.fetchall() - -for row in rows: - penalty_id = row[0] - automated_offense = row[1] - expires = row[2] - evaded_offense = row[3] - offender_id = row[4] - offense = row[5] - punisher_id = row[6] - penalty_type = row[7] - timestamp = row[8] - - # Retrieve offender name - existing_cur.execute(""" - SELECT - EFAlias.Name - FROM - EFAlias - INNER JOIN - EFClients ON EFAlias.AliasId = EFClients.CurrentAliasId - WHERE - EFClients.ClientId = ? - """, (offender_id,)) - offender_name = existing_cur.fetchone() - if offender_name: - offender_name = offender_name[0].replace('^7', '') - else: - offender_name = 'Unknown' - - # Retrieve punisher name - existing_cur.execute(""" - SELECT - EFAlias.Name - FROM - EFAlias - INNER JOIN - EFClients ON EFAlias.AliasId = EFClients.CurrentAliasId - WHERE - EFClients.ClientId = ? - """, (punisher_id,)) - punisher_name = existing_cur.fetchone() - if punisher_name: - punisher_name = punisher_name[0].replace('^7', '') - else: - punisher_name = 'Unknown' - - # Replace Type values - type_map = {0: "Report", 1: "Warning", 2: "Flag", 3: "Kick", 4: "Temp Ban", 5: "Perm Ban", 6: "Unban", 8: "Unflag"} - penalty_type = type_map.get(penalty_type, f"Unknown Type ({penalty_type})") - - # Set AutomatedOffense value to 'Yes' or 'No' - automated_offense_patterns = [r"VPNs are not allowed", - r"Ping is too high!", - r"name is not allowed"] # Simplified the patterns - - # Search the 'Offense' field for specified patterns - for pattern in automated_offense_patterns: - if re.search(pattern, offense): # Using re.search with 'offense' instead of 'automated_offense' - automated_offense = "Yes" - break - else: - automated_offense = "No" - - # Set EvadedOffense values to 'Yes' or 'No' - evaded_offense = "Yes" if evaded_offense == 1 else "No" - - # Set Expires value to 'Never' if it is NULL - expires = "Never" if expires is None else expires - - # Insert the modified row into the new Penalties table - new_cur.execute(""" - INSERT INTO Penalties ( - PenaltyId, - AutomatedOffense, - Expires, - EvadedOffense, - Offender, - Offense, - Punisher, - Type, - Timestamp - ) - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) 
- """, (penalty_id, automated_offense, expires, evaded_offense, offender_name, offense, punisher_name, penalty_type, timestamp)) - -# Commit changes and close the new database -new_conn.commit() -new_conn.close() - -# Close the existing database +import sqlite3 +import re + +# Connect to the existing database +existing_conn = sqlite3.connect("Database.db") +existing_cur = existing_conn.cursor() + +# Connect to the new database +new_conn = sqlite3.connect("Plutonium_Servers.db") +new_cur = new_conn.cursor() + +# Create the modified Penalties table in the new database +new_cur.execute(""" +CREATE TABLE "Penalties" ( + "PenaltyId" INTEGER NOT NULL, + "AutomatedOffense" TEXT NOT NULL, + "Expires" INTEGER, -- This line is modified to allow NULL values + "EvadedOffense" TEXT NOT NULL, + "Offender" TEXT NOT NULL, + "Offense" TEXT NOT NULL, + "Punisher" TEXT NOT NULL, + "Type" TEXT NOT NULL, + "Timestamp" INTEGER NOT NULL +) +""") + +# Fetch data from existing EFPenalties +existing_cur.execute(""" +SELECT + EFPenalties.PenaltyId, + EFPenalties.AutomatedOffense, + EFPenalties.Expires, + EFPenalties.IsEvadedOffense, + EFPenalties.OffenderId, + EFPenalties.Offense, + EFPenalties.PunisherId, + EFPenalties.Type, + EFPenalties."When" +FROM + EFPenalties +""") +rows = existing_cur.fetchall() + +for row in rows: + penalty_id = row[0] + automated_offense = row[1] + expires = row[2] + evaded_offense = row[3] + offender_id = row[4] + offense = row[5] + punisher_id = row[6] + penalty_type = row[7] + timestamp = row[8] + + # Retrieve offender name + existing_cur.execute(""" + SELECT + EFAlias.Name + FROM + EFAlias + INNER JOIN + EFClients ON EFAlias.AliasId = EFClients.CurrentAliasId + WHERE + EFClients.ClientId = ? + """, (offender_id,)) + offender_name = existing_cur.fetchone() + if offender_name: + offender_name = offender_name[0].replace('^7', '') + else: + offender_name = 'Unknown' + + # Retrieve punisher name + existing_cur.execute(""" + SELECT + EFAlias.Name + FROM + EFAlias + INNER JOIN + EFClients ON EFAlias.AliasId = EFClients.CurrentAliasId + WHERE + EFClients.ClientId = ? + """, (punisher_id,)) + punisher_name = existing_cur.fetchone() + if punisher_name: + punisher_name = punisher_name[0].replace('^7', '') + else: + punisher_name = 'Unknown' + + # Replace Type values + type_map = {0: "Report", 1: "Warning", 2: "Flag", 3: "Kick", 4: "Temp Ban", 5: "Perm Ban", 6: "Unban", 8: "Unflag"} + penalty_type = type_map.get(penalty_type, f"Unknown Type ({penalty_type})") + + # Set AutomatedOffense value to 'Yes' or 'No' + automated_offense_patterns = [r"VPNs are not allowed", + r"Ping is too high!", + r"name is not allowed"] # Simplified the patterns + + # Search the 'Offense' field for specified patterns + for pattern in automated_offense_patterns: + if re.search(pattern, offense): # Using re.search with 'offense' instead of 'automated_offense' + automated_offense = "Yes" + break + else: + automated_offense = "No" + + # Set EvadedOffense values to 'Yes' or 'No' + evaded_offense = "Yes" if evaded_offense == 1 else "No" + + # Set Expires value to 'Never' if it is NULL + expires = "Never" if expires is None else expires + + # Insert the modified row into the new Penalties table + new_cur.execute(""" + INSERT INTO Penalties ( + PenaltyId, + AutomatedOffense, + Expires, + EvadedOffense, + Offender, + Offense, + Punisher, + Type, + Timestamp + ) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ """, (penalty_id, automated_offense, expires, evaded_offense, offender_name, offense, punisher_name, penalty_type, timestamp)) + +# Commit changes and close the new database +new_conn.commit() +new_conn.close() + +# Close the existing database existing_conn.close() \ No newline at end of file diff --git a/Export EFPenalties/db_export_penalties_json.py b/src/Export EFPenalties/db_export_penalties_json.py similarity index 96% rename from Export EFPenalties/db_export_penalties_json.py rename to src/Export EFPenalties/db_export_penalties_json.py index 55e3fe6..dc7d45b 100644 --- a/Export EFPenalties/db_export_penalties_json.py +++ b/src/Export EFPenalties/db_export_penalties_json.py @@ -1,36 +1,36 @@ -import sqlite3 -import json - -# Connect to the new database -new_conn = sqlite3.connect("Database.db") -new_cur = new_conn.cursor() - -# Fetch data from the Penalties table -new_cur.execute(""" -SELECT PenaltyId, AutomatedOffense, Expires, EvadedOffense, Offender, Offense, Punisher, Type, Timestamp -FROM Penalties -ORDER BY Timestamp DESC -""") -penalties = new_cur.fetchall() - -# Create a list of dictionaries representing the penalties -penalties_list = [] -for row in penalties: - penalties_list.append({ - "PenaltyId": row[0], - "AutomatedOffense": row[1], - "Expires": row[2], - "EvadedOffense": row[3], - "Offender": row[4], - "Offense": row[5], - "Punisher": row[6], - "Type": row[7], - "Timestamp": row[8] - }) - -# Write the penalties to a JSON file -with open("Penalties.json", "w") as f: - json.dump(penalties_list, f, indent=2) - -# Close the new database +import sqlite3 +import json + +# Connect to the new database +new_conn = sqlite3.connect("Database.db") +new_cur = new_conn.cursor() + +# Fetch data from the Penalties table +new_cur.execute(""" +SELECT PenaltyId, AutomatedOffense, Expires, EvadedOffense, Offender, Offense, Punisher, Type, Timestamp +FROM Penalties +ORDER BY Timestamp DESC +""") +penalties = new_cur.fetchall() + +# Create a list of dictionaries representing the penalties +penalties_list = [] +for row in penalties: + penalties_list.append({ + "PenaltyId": row[0], + "AutomatedOffense": row[1], + "Expires": row[2], + "EvadedOffense": row[3], + "Offender": row[4], + "Offense": row[5], + "Punisher": row[6], + "Type": row[7], + "Timestamp": row[8] + }) + +# Write the penalties to a JSON file +with open("Penalties.json", "w") as f: + json.dump(penalties_list, f, indent=2) + +# Close the new database new_conn.close() \ No newline at end of file diff --git a/Export EFPenaltyIdentifiers/db_export_penaltyidentifiers.py b/src/Export EFPenaltyIdentifiers/db_export_penaltyidentifiers.py similarity index 95% rename from Export EFPenaltyIdentifiers/db_export_penaltyidentifiers.py rename to src/Export EFPenaltyIdentifiers/db_export_penaltyidentifiers.py index 3559ad7..d1f30fb 100644 --- a/Export EFPenaltyIdentifiers/db_export_penaltyidentifiers.py +++ b/src/Export EFPenaltyIdentifiers/db_export_penaltyidentifiers.py @@ -1,71 +1,71 @@ -import sqlite3 - -# Connect to the existing database -existing_conn = sqlite3.connect("Database.db") -existing_cur = existing_conn.cursor() - -# Connect to the new database -new_conn = sqlite3.connect("Plutonium_Servers.db") -new_cur = new_conn.cursor() - -# Create the modified EFPenaltyIdentifiers table in the new database -new_cur.execute(""" -CREATE TABLE "PenaltyIdentifiers" ( - "PenaltyIdentifierId" INTEGER NOT NULL, - "PenaltyId" INTEGER NOT NULL, - "Created" TEXT NOT NULL, - "Client" TEXT NOT NULL -) -""") - -existing_cur.execute(""" -SELECT - 
EFPenaltyIdentifiers.PenaltyIdentifierId, - EFPenaltyIdentifiers.PenaltyId, - EFPenaltyIdentifiers.CreatedDateTime, - EFPenaltyIdentifiers.NetworkId -FROM - EFPenaltyIdentifiers -""") -rows = existing_cur.fetchall() - -for row in rows: - penalty_identifier_id = row[0] - penalty_id = row[1] - created = row[2] - network_id = row[3] - - - existing_cur.execute(""" - SELECT - EFAlias.Name - FROM - EFAlias - INNER JOIN - EFClients ON EFAlias.AliasId = EFClients.CurrentAliasId - WHERE - EFClients.NetworkId = ? - """, (network_id,)) - client_name = existing_cur.fetchone() - if client_name: - client_name = client_name[0].replace('^7', '') - else: - client_name = 'Unknown' - - - new_cur.execute(""" - INSERT INTO PenaltyIdentifiers ( - PenaltyIdentifierId, - PenaltyId, - Created, - Client - ) - VALUES (?, ?, ?, ?) - """, (penalty_identifier_id, penalty_id, created, client_name)) - -# Commit changes and close the new database -new_conn.commit() -new_conn.close() - -# Close the existing database +import sqlite3 + +# Connect to the existing database +existing_conn = sqlite3.connect("Database.db") +existing_cur = existing_conn.cursor() + +# Connect to the new database +new_conn = sqlite3.connect("Plutonium_Servers.db") +new_cur = new_conn.cursor() + +# Create the modified EFPenaltyIdentifiers table in the new database +new_cur.execute(""" +CREATE TABLE "PenaltyIdentifiers" ( + "PenaltyIdentifierId" INTEGER NOT NULL, + "PenaltyId" INTEGER NOT NULL, + "Created" TEXT NOT NULL, + "Client" TEXT NOT NULL +) +""") + +existing_cur.execute(""" +SELECT + EFPenaltyIdentifiers.PenaltyIdentifierId, + EFPenaltyIdentifiers.PenaltyId, + EFPenaltyIdentifiers.CreatedDateTime, + EFPenaltyIdentifiers.NetworkId +FROM + EFPenaltyIdentifiers +""") +rows = existing_cur.fetchall() + +for row in rows: + penalty_identifier_id = row[0] + penalty_id = row[1] + created = row[2] + network_id = row[3] + + + existing_cur.execute(""" + SELECT + EFAlias.Name + FROM + EFAlias + INNER JOIN + EFClients ON EFAlias.AliasId = EFClients.CurrentAliasId + WHERE + EFClients.NetworkId = ? + """, (network_id,)) + client_name = existing_cur.fetchone() + if client_name: + client_name = client_name[0].replace('^7', '') + else: + client_name = 'Unknown' + + + new_cur.execute(""" + INSERT INTO PenaltyIdentifiers ( + PenaltyIdentifierId, + PenaltyId, + Created, + Client + ) + VALUES (?, ?, ?, ?) 
+ """, (penalty_identifier_id, penalty_id, created, client_name)) + +# Commit changes and close the new database +new_conn.commit() +new_conn.close() + +# Close the existing database existing_conn.close() \ No newline at end of file diff --git a/Export EFPenaltyIdentifiers/db_export_penaltyidentifiers_json.py b/src/Export EFPenaltyIdentifiers/db_export_penaltyidentifiers_json.py similarity index 96% rename from Export EFPenaltyIdentifiers/db_export_penaltyidentifiers_json.py rename to src/Export EFPenaltyIdentifiers/db_export_penaltyidentifiers_json.py index e802970..3cca34c 100644 --- a/Export EFPenaltyIdentifiers/db_export_penaltyidentifiers_json.py +++ b/src/Export EFPenaltyIdentifiers/db_export_penaltyidentifiers_json.py @@ -1,26 +1,26 @@ -import sqlite3 -import json - -new_conn = sqlite3.connect("Plutonium_Servers.db") -new_cur = new_conn.cursor() - -new_cur.execute(""" -SELECT PenaltyIdentifierId, PenaltyId, Created, Client -FROM PenaltyIdentifiers -ORDER BY Created DESC -""") -penalty_identifiers = new_cur.fetchall() - -penalty_identifiers_list = [] -for row in penalty_identifiers: - penalty_identifiers_list.append({ - "PenaltyIdentifierId": row[0], - "PenaltyId": row[1], - "Created": row[2], - "Client": row[3] - }) - -with open("PenaltyIdentifiers.json", "w") as f: - json.dump(penalty_identifiers_list, f, indent=2) - +import sqlite3 +import json + +new_conn = sqlite3.connect("Plutonium_Servers.db") +new_cur = new_conn.cursor() + +new_cur.execute(""" +SELECT PenaltyIdentifierId, PenaltyId, Created, Client +FROM PenaltyIdentifiers +ORDER BY Created DESC +""") +penalty_identifiers = new_cur.fetchall() + +penalty_identifiers_list = [] +for row in penalty_identifiers: + penalty_identifiers_list.append({ + "PenaltyIdentifierId": row[0], + "PenaltyId": row[1], + "Created": row[2], + "Client": row[3] + }) + +with open("PenaltyIdentifiers.json", "w") as f: + json.dump(penalty_identifiers_list, f, indent=2) + new_conn.close() \ No newline at end of file diff --git a/Export EFServers/db_export_servers.py b/src/Export EFServers/db_export_servers.py similarity index 96% rename from Export EFServers/db_export_servers.py rename to src/Export EFServers/db_export_servers.py index 32d60a0..fff1ee3 100644 --- a/Export EFServers/db_export_servers.py +++ b/src/Export EFServers/db_export_servers.py @@ -1,67 +1,67 @@ -# EFServers - -import sqlite3 - -# Connect to the existing database -existing_conn = sqlite3.connect("Database.db") -existing_cur = existing_conn.cursor() - -# Connect to the new database -new_conn = sqlite3.connect("Plutonium_Servers.db") -new_cur = new_conn.cursor() - -# Create the modified Servers table in the new database -new_cur.execute(""" -CREATE TABLE "Servers" ( - "ServerId" INTEGER NOT NULL, - "Active" INTEGER NOT NULL, - "Port" INTEGER NOT NULL, - "Endpoint" TEXT NOT NULL, - "Game" TEXT NOT NULL, - "ServerName" TEXT NOT NULL, - "Password" TEXT NOT NULL -) -""") - -# Fetch data from existing EFServers -existing_cur.execute(""" -SELECT ServerId, Active, Port, Endpoint, GameName, HostName, IsPasswordProtected -FROM EFServers -""") -rows = existing_cur.fetchall() - -# Define the game name mapping -game_mapping = {5: "WaW", 7: "BO2", 6: "BO", 3: "MW3"} - -for row in rows: - server_id = row[0] - active = row[1] - port = row[2] - endpoint = row[3] - game_name = row[4] - server_name = row[5] - is_password_protected = row[6] - - # Replace the game_name with corresponding text - game_name = game_mapping.get(game_name, game_name) - - # Replace the IsPasswordProtected values with 'Yes' or 
'No' - password = "Yes" if is_password_protected == 1 else "No" - - # Insert the modified row into the new Servers table - new_cur.execute(""" - INSERT INTO Servers ( - ServerId, - Active, - Port, - Endpoint, - Game, - ServerName, - Password - ) VALUES (?, ?, ?, ?, ?, ?, ?) - """, (server_id, active, port, endpoint, game_name, server_name, password)) - -# Commit the changes and close the connections -new_conn.commit() -existing_conn.close() +# EFServers + +import sqlite3 + +# Connect to the existing database +existing_conn = sqlite3.connect("Database.db") +existing_cur = existing_conn.cursor() + +# Connect to the new database +new_conn = sqlite3.connect("Plutonium_Servers.db") +new_cur = new_conn.cursor() + +# Create the modified Servers table in the new database +new_cur.execute(""" +CREATE TABLE "Servers" ( + "ServerId" INTEGER NOT NULL, + "Active" INTEGER NOT NULL, + "Port" INTEGER NOT NULL, + "Endpoint" TEXT NOT NULL, + "Game" TEXT NOT NULL, + "ServerName" TEXT NOT NULL, + "Password" TEXT NOT NULL +) +""") + +# Fetch data from existing EFServers +existing_cur.execute(""" +SELECT ServerId, Active, Port, Endpoint, GameName, HostName, IsPasswordProtected +FROM EFServers +""") +rows = existing_cur.fetchall() + +# Define the game name mapping +game_mapping = {5: "WaW", 7: "BO2", 6: "BO", 3: "MW3"} + +for row in rows: + server_id = row[0] + active = row[1] + port = row[2] + endpoint = row[3] + game_name = row[4] + server_name = row[5] + is_password_protected = row[6] + + # Replace the game_name with corresponding text + game_name = game_mapping.get(game_name, game_name) + + # Replace the IsPasswordProtected values with 'Yes' or 'No' + password = "Yes" if is_password_protected == 1 else "No" + + # Insert the modified row into the new Servers table + new_cur.execute(""" + INSERT INTO Servers ( + ServerId, + Active, + Port, + Endpoint, + Game, + ServerName, + Password + ) VALUES (?, ?, ?, ?, ?, ?, ?) 
+ """, (server_id, active, port, endpoint, game_name, server_name, password)) + +# Commit the changes and close the connections +new_conn.commit() +existing_conn.close() new_conn.close() \ No newline at end of file diff --git a/Export EFServers/db_export_servers_json.py b/src/Export EFServers/db_export_servers_json.py similarity index 96% rename from Export EFServers/db_export_servers_json.py rename to src/Export EFServers/db_export_servers_json.py index c515676..cee96db 100644 --- a/Export EFServers/db_export_servers_json.py +++ b/src/Export EFServers/db_export_servers_json.py @@ -1,33 +1,33 @@ -import sqlite3 -import json - -# Connect to the new database -new_conn = sqlite3.connect("Database.db") -new_cur = new_conn.cursor() - -# Fetch data from the Servers table -new_cur.execute(""" -SELECT ServerId, Active, Port, Endpoint, Game, ServerName, Password -FROM Servers -""") -servers = new_cur.fetchall() - -# Create a list of dictionaries representing the servers -servers_list = [] -for row in servers: - servers_list.append({ - "ServerId": row[0], - "Active": row[1], - "Port": row[2], - "Endpoint": row[3], - "Game": row[4], - "ServerName": row[5], - "Password": row[6] - }) - -# Write the servers data to a JSON file -with open("Servers.json", "w") as f: - json.dump(servers_list, f, indent=2) - -# Close the new database +import sqlite3 +import json + +# Connect to the new database +new_conn = sqlite3.connect("Database.db") +new_cur = new_conn.cursor() + +# Fetch data from the Servers table +new_cur.execute(""" +SELECT ServerId, Active, Port, Endpoint, Game, ServerName, Password +FROM Servers +""") +servers = new_cur.fetchall() + +# Create a list of dictionaries representing the servers +servers_list = [] +for row in servers: + servers_list.append({ + "ServerId": row[0], + "Active": row[1], + "Port": row[2], + "Endpoint": row[3], + "Game": row[4], + "ServerName": row[5], + "Password": row[6] + }) + +# Write the servers data to a JSON file +with open("Servers.json", "w") as f: + json.dump(servers_list, f, indent=2) + +# Close the new database new_conn.close() \ No newline at end of file diff --git a/Export InboxMessages/db_export_inboxmessages.py b/src/Export InboxMessages/db_export_inboxmessages.py similarity index 97% rename from Export InboxMessages/db_export_inboxmessages.py rename to src/Export InboxMessages/db_export_inboxmessages.py index ab2f1ae..68f1f5f 100644 --- a/Export InboxMessages/db_export_inboxmessages.py +++ b/src/Export InboxMessages/db_export_inboxmessages.py @@ -1,56 +1,56 @@ -import sqlite3 - -# Connect to the existing database -existing_conn = sqlite3.connect("Database.db") -existing_cur = existing_conn.cursor() - -# Create a new database -new_conn = sqlite3.connect("Plutonium_Servers.db") -new_cur = new_conn.cursor() - -# Create the new InboxMessagesModified table -new_cur.execute(""" -CREATE TABLE InboxMessagesModified ( - InboxMessageId INTEGER PRIMARY KEY, - Created TEXT, - Origin TEXT, - Target TEXT, - ServerId INTEGER, - Message TEXT, - Read TEXT -) -""") - -# Fetch data from the InboxMessages table -existing_cur.execute("SELECT * FROM InboxMessages") -inbox_messages = existing_cur.fetchall() - -# Iterate through the InboxMessages and insert modified data into the new table -for msg in inbox_messages: - msg_id, created, _, source_client_id, dest_client_id, server_id, message, is_delivered = msg - - # Find the SourceClientId and DestinationClientId names - for client_id in (source_client_id, dest_client_id): - existing_cur.execute("SELECT CurrentAliasId FROM EFClients 
WHERE ClientId = ?", (client_id,)) - alias_id = existing_cur.fetchone()[0] - existing_cur.execute("SELECT Name FROM EFAlias WHERE AliasId = ?", (alias_id,)) - name = existing_cur.fetchone()[0].replace('^7', '') - - if client_id == source_client_id: - origin = name - else: - target = name - - # Update Read status - read = "Yes" if is_delivered == 1 else "No" - - # Insert the modified data into the new table - new_cur.execute(""" - INSERT INTO InboxMessagesModified (InboxMessageId, Created, Origin, Target, ServerId, Message, Read) - VALUES (?, ?, ?, ?, ?, ?, ?) - """, (msg_id, created, origin, target, server_id, message, read)) - -# Commit the changes and close the connections -new_conn.commit() -existing_conn.close() +import sqlite3 + +# Connect to the existing database +existing_conn = sqlite3.connect("Database.db") +existing_cur = existing_conn.cursor() + +# Create a new database +new_conn = sqlite3.connect("Plutonium_Servers.db") +new_cur = new_conn.cursor() + +# Create the new InboxMessagesModified table +new_cur.execute(""" +CREATE TABLE InboxMessagesModified ( + InboxMessageId INTEGER PRIMARY KEY, + Created TEXT, + Origin TEXT, + Target TEXT, + ServerId INTEGER, + Message TEXT, + Read TEXT +) +""") + +# Fetch data from the InboxMessages table +existing_cur.execute("SELECT * FROM InboxMessages") +inbox_messages = existing_cur.fetchall() + +# Iterate through the InboxMessages and insert modified data into the new table +for msg in inbox_messages: + msg_id, created, _, source_client_id, dest_client_id, server_id, message, is_delivered = msg + + # Find the SourceClientId and DestinationClientId names + for client_id in (source_client_id, dest_client_id): + existing_cur.execute("SELECT CurrentAliasId FROM EFClients WHERE ClientId = ?", (client_id,)) + alias_id = existing_cur.fetchone()[0] + existing_cur.execute("SELECT Name FROM EFAlias WHERE AliasId = ?", (alias_id,)) + name = existing_cur.fetchone()[0].replace('^7', '') + + if client_id == source_client_id: + origin = name + else: + target = name + + # Update Read status + read = "Yes" if is_delivered == 1 else "No" + + # Insert the modified data into the new table + new_cur.execute(""" + INSERT INTO InboxMessagesModified (InboxMessageId, Created, Origin, Target, ServerId, Message, Read) + VALUES (?, ?, ?, ?, ?, ?, ?) 
+ """, (msg_id, created, origin, target, server_id, message, read)) + +# Commit the changes and close the connections +new_conn.commit() +existing_conn.close() new_conn.close() \ No newline at end of file diff --git a/Export InboxMessages/db_export_inboxmessages_json.py b/src/Export InboxMessages/db_export_inboxmessages_json.py similarity index 96% rename from Export InboxMessages/db_export_inboxmessages_json.py rename to src/Export InboxMessages/db_export_inboxmessages_json.py index 9d4ec7d..0b5cd74 100644 --- a/Export InboxMessages/db_export_inboxmessages_json.py +++ b/src/Export InboxMessages/db_export_inboxmessages_json.py @@ -1,34 +1,34 @@ -import sqlite3 -import json - -# Connect to the new database -new_conn = sqlite3.connect("Plutonium_Servers.db") -new_cur = new_conn.cursor() - -# Fetch data from the InboxMessagesModified table -new_cur.execute(""" -SELECT InboxMessageId, Created, Origin, Target, ServerId, Message, Read -FROM InboxMessagesModified -ORDER BY Created DESC -""") -inbox_messages = new_cur.fetchall() - -# Create a list of dictionaries representing the inbox messages -inbox_messages_list = [] -for row in inbox_messages: - inbox_messages_list.append({ - "InboxMessageId": row[0], - "Created": row[1], - "Origin": row[2], - "Target": row[3], - "ServerId": row[4], - "Message": row[5], - "Read": row[6] - }) - -# Write the inbox messages to a JSON file -with open("InboxMessages.json", "w") as f: - json.dump(inbox_messages_list, f, indent=2) - -# Close the new database +import sqlite3 +import json + +# Connect to the new database +new_conn = sqlite3.connect("Plutonium_Servers.db") +new_cur = new_conn.cursor() + +# Fetch data from the InboxMessagesModified table +new_cur.execute(""" +SELECT InboxMessageId, Created, Origin, Target, ServerId, Message, Read +FROM InboxMessagesModified +ORDER BY Created DESC +""") +inbox_messages = new_cur.fetchall() + +# Create a list of dictionaries representing the inbox messages +inbox_messages_list = [] +for row in inbox_messages: + inbox_messages_list.append({ + "InboxMessageId": row[0], + "Created": row[1], + "Origin": row[2], + "Target": row[3], + "ServerId": row[4], + "Message": row[5], + "Read": row[6] + }) + +# Write the inbox messages to a JSON file +with open("InboxMessages.json", "w") as f: + json.dump(inbox_messages_list, f, indent=2) + +# Close the new database new_conn.close() \ No newline at end of file diff --git a/export_db_json.py b/src/export_db_json.py similarity index 96% rename from export_db_json.py rename to src/export_db_json.py index 92b9bc8..3e6ccb8 100644 --- a/export_db_json.py +++ b/src/export_db_json.py @@ -1,265 +1,265 @@ -# Exports the newly created ClientConnectionHistory, IPAddresses, AuditLog, ClientConnectionHistory, ClientMessages, Clients, Maps, Metadata, Penalties, PenaltyIdentifiers, Servers & InboxMessages Tables to separate files in .json format. 
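Note: the InboxMessages export above resolves Origin and Target with two separate lookups per message; the same names could be fetched in one pass with a double JOIN. A minimal sketch, assuming the source columns are named SourceClientId and DestinationClientId as the unpacking in that script suggests (the original schema is not shown here):

import sqlite3

# Sketch only: resolve both alias names per message in a single query.
# SourceClientId / DestinationClientId / InboxMessageId are assumed column names;
# REPLACE(..., '^7', '') mirrors the colour-code stripping done in the scripts.
conn = sqlite3.connect("Database.db")
cur = conn.cursor()
cur.execute("""
SELECT m.InboxMessageId,
       REPLACE(src_alias.Name, '^7', '') AS Origin,
       REPLACE(dst_alias.Name, '^7', '') AS Target
FROM InboxMessages AS m
JOIN EFClients AS src       ON src.ClientId = m.SourceClientId
JOIN EFAlias   AS src_alias ON src_alias.AliasId = src.CurrentAliasId
JOIN EFClients AS dst       ON dst.ClientId = m.DestinationClientId
JOIN EFAlias   AS dst_alias ON dst_alias.AliasId = dst.CurrentAliasId
""")
for message_id, origin, target in cur.fetchall():
    print(message_id, origin, target)
conn.close()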
- -# Created by Ahrimdon aided by GPT-4 - -import sqlite3 -import json - -new_conn = sqlite3.connect("Plutonium_Servers.db") -new_cur = new_conn.cursor() - -# ------------------- IPAddresses Table ------------------- - -new_cur.execute(""" -SELECT Name, SearchableIPAddress, DateAdded FROM "IPAddresses" -ORDER BY DateAdded DESC -""") -client_info = new_cur.fetchall() - -client_info_list = [] -for row in client_info: - client_info_list.append({ - "Name": row[0], - "SearchableIPAddress": row[1], - "DateAdded": row[2] - }) - -with open("IPAddresses.json", "w") as f: - json.dump(client_info_list, f, indent=2) - -# ------------------- AuditLog Table ------------------- - -new_cur.execute(""" -SELECT ChangeHistoryId, TypeOfChange, Time, Data, Command, Origin, Target FROM AuditLog -ORDER BY Time DESC -""") -ef_change_history = new_cur.fetchall() - -ef_change_history_list = [] -for row in ef_change_history: - ef_change_history_list.append({ - "ChangeHistoryId": row[0], - "TypeOfChange": row[1], - "Time": row[2], - "Data": row[3], - "Command": row[4], - "Origin": row[5], - "Target": row[6] - }) - -with open("AuditLog.json", "w") as f: - json.dump(ef_change_history_list, f, indent=2) - -# ------------------- ClientConnectionHistory Table ------------------- - -new_cur.execute(""" -SELECT ConnectionId, Client, ConnectionTime, ConnectionType, Server -FROM ClientConnectionHistory -ORDER BY ConnectionTime DESC -""") -client_connection_history = new_cur.fetchall() - -client_connection_history_list = [] -for row in client_connection_history: - client_connection_history_list.append({ - "ConnectionId": row[0], - "Client": row[1], - "ConnectionTime": row[2], - "ConnectionType": row[3], - "Server": row[4] - }) - -with open("ClientConnectionHistory.json", "w") as f: - json.dump(client_connection_history_list, f, indent=2) - -# ------------------- Messages Table ------------------- - -new_cur.execute(""" -SELECT MessageId, Client, Message, TimeSent, Server -FROM ClientMessages -ORDER BY TimeSent DESC -""") -client_messages = new_cur.fetchall() - -client_messages_list = [] -for row in client_messages: - client_messages_list.append({ - "MessageId": row[0], - "Client": row[1], - "Message": row[2], - "TimeSent": row[3], - "Server": row[4] - }) - -with open("ClientMessages.json", "w") as f: - json.dump(client_messages_list, f, indent=2) - -# ------------------- Clients Table ------------------- - -new_cur.execute(""" -SELECT Connections, Name, FirstConnection, Game, LastConnection, Level, Masked, TotalConnectionTime -FROM Clients -ORDER BY LastConnection DESC -""") -clients = new_cur.fetchall() - -clients_list = [] -for row in clients: - clients_list.append({ - "Connections": row[0], - "Name": row[1], - "FirstConnection": row[2], - "Game": row[3], - "LastConnection": row[4], - "Level": row[5], - "Masked": row[6], - "TotalConnectionTime": row[7] - }) - -with open("Clients.json", "w") as f: - json.dump(clients_list, f, indent=2) - -# ------------------- Maps Table ------------------- - -new_cur.execute(""" -SELECT - MapId, CreatedDateTime, Name, Game -FROM - Maps -ORDER BY - MapId DESC -""") -rows = new_cur.fetchall() - -maps_list = [] -for row in rows: - map_dict = { - "MapId": row[0], - "CreatedDateTime": row[1], - "Name": row[2], - "Game": row[3] - } - maps_list.append(map_dict) - -with open("maps_export.json", "w") as json_file: - json.dump(maps_list, json_file, indent=4) - -# ------------------- Meta Table ------------------- - -new_cur.execute(""" -SELECT MetaId, Name, Timestamp, Note, Value -FROM Metadata -ORDER 
BY Timestamp DESC -""") -metadata = new_cur.fetchall() - -metadata_list = [] -for row in metadata: - metadata_list.append({ - "MetaId": row[0], - "Name": row[1], - "Timestamp": row[2], - "Note": row[3], - "Value": row[4] - }) - -with open("Metadata.json", "w") as f: - json.dump(metadata_list, f, indent=2) - -# ------------------- Penalties Table ------------------- - -new_cur.execute(""" -SELECT PenaltyId, AutomatedOffense, Expires, EvadedOffense, Offender, Offense, Punisher, Type, Timestamp -FROM Penalties -ORDER BY Timestamp DESC -""") -penalties = new_cur.fetchall() - -penalties_list = [] -for row in penalties: - penalties_list.append({ - "PenaltyId": row[0], - "AutomatedOffense": row[1], - "Expires": row[2], - "EvadedOffense": row[3], - "Offender": row[4], - "Offense": row[5], - "Punisher": row[6], - "Type": row[7], - "Timestamp": row[8] - }) - -with open("Penalties.json", "w") as f: - json.dump(penalties_list, f, indent=2) - -# ------------------- PenaltyIdentifiers Table ------------------- - -new_cur.execute(""" -SELECT PenaltyIdentifierId, PenaltyId, Created, Client -FROM PenaltyIdentifiers -ORDER BY Created DESC -""") -penalty_identifiers = new_cur.fetchall() - -penalty_identifiers_list = [] -for row in penalty_identifiers: - penalty_identifiers_list.append({ - "PenaltyIdentifierId": row[0], - "PenaltyId": row[1], - "Created": row[2], - "Client": row[3] - }) - -with open("PenaltyIdentifiers.json", "w") as f: - json.dump(penalty_identifiers_list, f, indent=2) - -# ------------------- Servers Table ------------------- - -new_cur.execute(""" -SELECT ServerId, Active, Port, Endpoint, Game, ServerName, Password -FROM Servers -""") -servers = new_cur.fetchall() - -servers_list = [] -for row in servers: - servers_list.append({ - "ServerId": row[0], - "Active": row[1], - "Port": row[2], - "Endpoint": row[3], - "Game": row[4], - "ServerName": row[5], - "Password": row[6] - }) - -with open("Servers.json", "w") as f: - json.dump(servers_list, f, indent=2) - -# ------------------- InboxMessages Table ------------------- - -new_cur.execute(""" -SELECT InboxMessageId, Created, Origin, Target, ServerId, Message, Read -FROM InboxMessagesModified -ORDER BY Created DESC -""") -inbox_messages = new_cur.fetchall() - -inbox_messages_list = [] -for row in inbox_messages: - inbox_messages_list.append({ - "InboxMessageId": row[0], - "Created": row[1], - "Origin": row[2], - "Target": row[3], - "ServerId": row[4], - "Message": row[5], - "Read": row[6] - }) - -with open("InboxMessages.json", "w") as f: - json.dump(inbox_messages_list, f, indent=2) - -# ------------------- End ------------------- - +# Exports the newly created ClientConnectionHistory, IPAddresses, AuditLog, ClientConnectionHistory, ClientMessages, Clients, Maps, Metadata, Penalties, PenaltyIdentifiers, Servers & InboxMessages Tables to separate files in .json format. 
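Note: every table in the combined export below repeats the same fetch, build-a-list-of-dicts, json.dump sequence; that pattern could be expressed once in a small helper. A minimal sketch under that assumption (dump_table is an illustrative name, not part of the script):

import json
import sqlite3

def dump_table(cur, query, fields, out_path, indent=2):
    # Run the query, map each row onto the given field names, write JSON.
    cur.execute(query)
    rows = [dict(zip(fields, row)) for row in cur.fetchall()]
    with open(out_path, "w") as f:
        json.dump(rows, f, indent=indent)

conn = sqlite3.connect("Plutonium_Servers.db")
dump_table(
    conn.cursor(),
    "SELECT ServerId, Active, Port, Endpoint, Game, ServerName, Password FROM Servers",
    ["ServerId", "Active", "Port", "Endpoint", "Game", "ServerName", "Password"],
    "Servers.json",
)
conn.close()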
+ +# Created by Ahrimdon aided by GPT-4 + +import sqlite3 +import json + +new_conn = sqlite3.connect("Plutonium_Servers.db") +new_cur = new_conn.cursor() + +# ------------------- IPAddresses Table ------------------- + +new_cur.execute(""" +SELECT Name, SearchableIPAddress, DateAdded FROM "IPAddresses" +ORDER BY DateAdded DESC +""") +client_info = new_cur.fetchall() + +client_info_list = [] +for row in client_info: + client_info_list.append({ + "Name": row[0], + "SearchableIPAddress": row[1], + "DateAdded": row[2] + }) + +with open("IPAddresses.json", "w") as f: + json.dump(client_info_list, f, indent=2) + +# ------------------- AuditLog Table ------------------- + +new_cur.execute(""" +SELECT ChangeHistoryId, TypeOfChange, Time, Data, Command, Origin, Target FROM AuditLog +ORDER BY Time DESC +""") +ef_change_history = new_cur.fetchall() + +ef_change_history_list = [] +for row in ef_change_history: + ef_change_history_list.append({ + "ChangeHistoryId": row[0], + "TypeOfChange": row[1], + "Time": row[2], + "Data": row[3], + "Command": row[4], + "Origin": row[5], + "Target": row[6] + }) + +with open("AuditLog.json", "w") as f: + json.dump(ef_change_history_list, f, indent=2) + +# ------------------- ClientConnectionHistory Table ------------------- + +new_cur.execute(""" +SELECT ConnectionId, Client, ConnectionTime, ConnectionType, Server +FROM ClientConnectionHistory +ORDER BY ConnectionTime DESC +""") +client_connection_history = new_cur.fetchall() + +client_connection_history_list = [] +for row in client_connection_history: + client_connection_history_list.append({ + "ConnectionId": row[0], + "Client": row[1], + "ConnectionTime": row[2], + "ConnectionType": row[3], + "Server": row[4] + }) + +with open("ClientConnectionHistory.json", "w") as f: + json.dump(client_connection_history_list, f, indent=2) + +# ------------------- Messages Table ------------------- + +new_cur.execute(""" +SELECT MessageId, Client, Message, TimeSent, Server +FROM ClientMessages +ORDER BY TimeSent DESC +""") +client_messages = new_cur.fetchall() + +client_messages_list = [] +for row in client_messages: + client_messages_list.append({ + "MessageId": row[0], + "Client": row[1], + "Message": row[2], + "TimeSent": row[3], + "Server": row[4] + }) + +with open("ClientMessages.json", "w") as f: + json.dump(client_messages_list, f, indent=2) + +# ------------------- Clients Table ------------------- + +new_cur.execute(""" +SELECT Connections, Name, FirstConnection, Game, LastConnection, Level, Masked, TotalConnectionTime +FROM Clients +ORDER BY LastConnection DESC +""") +clients = new_cur.fetchall() + +clients_list = [] +for row in clients: + clients_list.append({ + "Connections": row[0], + "Name": row[1], + "FirstConnection": row[2], + "Game": row[3], + "LastConnection": row[4], + "Level": row[5], + "Masked": row[6], + "TotalConnectionTime": row[7] + }) + +with open("Clients.json", "w") as f: + json.dump(clients_list, f, indent=2) + +# ------------------- Maps Table ------------------- + +new_cur.execute(""" +SELECT + MapId, CreatedDateTime, Name, Game +FROM + Maps +ORDER BY + MapId DESC +""") +rows = new_cur.fetchall() + +maps_list = [] +for row in rows: + map_dict = { + "MapId": row[0], + "CreatedDateTime": row[1], + "Name": row[2], + "Game": row[3] + } + maps_list.append(map_dict) + +with open("maps_export.json", "w") as json_file: + json.dump(maps_list, json_file, indent=4) + +# ------------------- Meta Table ------------------- + +new_cur.execute(""" +SELECT MetaId, Name, Timestamp, Note, Value +FROM Metadata +ORDER 
BY Timestamp DESC +""") +metadata = new_cur.fetchall() + +metadata_list = [] +for row in metadata: + metadata_list.append({ + "MetaId": row[0], + "Name": row[1], + "Timestamp": row[2], + "Note": row[3], + "Value": row[4] + }) + +with open("Metadata.json", "w") as f: + json.dump(metadata_list, f, indent=2) + +# ------------------- Penalties Table ------------------- + +new_cur.execute(""" +SELECT PenaltyId, AutomatedOffense, Expires, EvadedOffense, Offender, Offense, Punisher, Type, Timestamp +FROM Penalties +ORDER BY Timestamp DESC +""") +penalties = new_cur.fetchall() + +penalties_list = [] +for row in penalties: + penalties_list.append({ + "PenaltyId": row[0], + "AutomatedOffense": row[1], + "Expires": row[2], + "EvadedOffense": row[3], + "Offender": row[4], + "Offense": row[5], + "Punisher": row[6], + "Type": row[7], + "Timestamp": row[8] + }) + +with open("Penalties.json", "w") as f: + json.dump(penalties_list, f, indent=2) + +# ------------------- PenaltyIdentifiers Table ------------------- + +new_cur.execute(""" +SELECT PenaltyIdentifierId, PenaltyId, Created, Client +FROM PenaltyIdentifiers +ORDER BY Created DESC +""") +penalty_identifiers = new_cur.fetchall() + +penalty_identifiers_list = [] +for row in penalty_identifiers: + penalty_identifiers_list.append({ + "PenaltyIdentifierId": row[0], + "PenaltyId": row[1], + "Created": row[2], + "Client": row[3] + }) + +with open("PenaltyIdentifiers.json", "w") as f: + json.dump(penalty_identifiers_list, f, indent=2) + +# ------------------- Servers Table ------------------- + +new_cur.execute(""" +SELECT ServerId, Active, Port, Endpoint, Game, ServerName, Password +FROM Servers +""") +servers = new_cur.fetchall() + +servers_list = [] +for row in servers: + servers_list.append({ + "ServerId": row[0], + "Active": row[1], + "Port": row[2], + "Endpoint": row[3], + "Game": row[4], + "ServerName": row[5], + "Password": row[6] + }) + +with open("Servers.json", "w") as f: + json.dump(servers_list, f, indent=2) + +# ------------------- InboxMessages Table ------------------- + +new_cur.execute(""" +SELECT InboxMessageId, Created, Origin, Target, ServerId, Message, Read +FROM InboxMessagesModified +ORDER BY Created DESC +""") +inbox_messages = new_cur.fetchall() + +inbox_messages_list = [] +for row in inbox_messages: + inbox_messages_list.append({ + "InboxMessageId": row[0], + "Created": row[1], + "Origin": row[2], + "Target": row[3], + "ServerId": row[4], + "Message": row[5], + "Read": row[6] + }) + +with open("InboxMessages.json", "w") as f: + json.dump(inbox_messages_list, f, indent=2) + +# ------------------- End ------------------- + new_conn.close() \ No newline at end of file
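Note: the index-based dict building used throughout these JSON exports can also be replaced by sqlite3's built-in Row factory, which keys each row by column name. A minimal sketch against the Plutonium_Servers.db produced by the table scripts above:

import json
import sqlite3

conn = sqlite3.connect("Plutonium_Servers.db")
conn.row_factory = sqlite3.Row  # rows become mappings keyed by column name

cur = conn.cursor()
cur.execute("""
SELECT PenaltyIdentifierId, PenaltyId, Created, Client
FROM PenaltyIdentifiers
ORDER BY Created DESC
""")
rows = [dict(row) for row in cur.fetchall()]

with open("PenaltyIdentifiers.json", "w") as f:
    json.dump(rows, f, indent=2)

conn.close()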