Use much faster method of backing up sql tables (still need to fix dates though)

This commit is contained in:
UnknownShadow200 2016-11-07 23:12:07 +11:00
parent 413990e3ca
commit 9be0b0691d
5 changed files with 32 additions and 424 deletions
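The speed-up comes from the callback-based reader path added to Database.cs and ParameterisedQuery.cs below: instead of Database.Fill() buffering a whole table into a DataTable before anything can be written out, Database.ExecuteReader() now hands each row to a ReaderCallback while the reader is still open. The new Server\Backup\TableDumper.cs referenced in the project file is not part of this diff, so the following is only a rough sketch, assuming a hypothetical DumpTableRows helper, of how that API could be used to write a dump row by row:

using System.IO;
using MCGalaxy.SQL;

static class TableDumpSketch {
    // Hypothetical helper - not the TableDumper.cs added by this commit.
    // Streams one table's rows straight to the output writer instead of
    // filling a DataTable first, using the new ExecuteReader/ReaderCallback API.
    public static void DumpTableRows(string table, StreamWriter sql) {
        Database.ExecuteReader("SELECT * FROM `" + table + "`",
            reader => {
                sql.Write("INSERT INTO `" + table + "` VALUES (");
                for (int col = 0; col < reader.FieldCount; col++) {
                    if (col > 0) sql.Write(", ");
                    if (reader.IsDBNull(col)) { sql.Write("NULL"); continue; }
                    // Simplified: real dump code must escape quotes, keep numbers
                    // unquoted, and format DateTime values (the commit message
                    // notes that dates still need fixing).
                    sql.Write("'" + reader.GetValue(col) + "'");
                }
                sql.WriteLine(");");
            });
    }
}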

View File

@@ -29,62 +29,70 @@ namespace MCGalaxy.SQL {
}
[Obsolete("Use Execute() method instead.")]
public static void executeQuery(string queryString, bool createDB = false) {
public static void executeQuery(string sql, bool createDB = false) {
ParameterisedQuery query = Backend.GetStaticParameterised();
Execute(query, queryString, createDB, null);
Execute(query, sql, createDB, null);
}
public static void Execute(string queryString) {
public static void Execute(string sql) {
ParameterisedQuery query = Backend.GetStaticParameterised();
Execute(query, queryString, false, null);
Execute(query, sql, false, null);
}
public static void Execute(string queryString, params object[] args) {
public static void Execute(string sql, params object[] args) {
ParameterisedQuery query = Backend.CreateParameterised();
Execute(query, queryString, false, args);
Execute(query, sql, false, args);
}
public static void ExecuteReader(string sql, ReaderCallback callback, params object[] args) {
ParameterisedQuery query = Backend.CreateParameterised();
DoDatabaseCall(query, sql, false, null, callback, args);
}
[Obsolete("Use Fill() method instead.")]
public static DataTable fillData(string queryString, bool skipError = false) {
public static DataTable fillData(string sql, bool skipError = false) {
ParameterisedQuery query = Backend.GetStaticParameterised();
return Fill(query, queryString, null);
return Fill(query, sql, null);
}
public static DataTable Fill(string queryString) {
public static DataTable Fill(string sql) {
ParameterisedQuery query = Backend.GetStaticParameterised();
return Fill(query, queryString, null);
return Fill(query, sql, null);
}
public static DataTable Fill(string queryString, params object[] args) {
public static DataTable Fill(string sql, params object[] args) {
ParameterisedQuery query = Backend.CreateParameterised();
return Fill(query, queryString, args);
return Fill(query, sql, args);
}
internal static void Execute(ParameterisedQuery query, string sql, bool createDB, params object[] args) {
DoDatabaseCall(query, sql, createDB, null, args);
DoDatabaseCall(query, sql, createDB, null, null, args);
}
internal static DataTable Fill(ParameterisedQuery query, string sql, params object[] args) {
using (DataTable results = new DataTable("toReturn")) {
DoDatabaseCall(query, sql, false, results, args);
DoDatabaseCall(query, sql, false, results, null, args);
return results;
}
}
static void DoDatabaseCall(ParameterisedQuery query, string sql, bool createDB,
DataTable results, params object[] args) {
DataTable results, ReaderCallback callback, params object[] args) {
BindParams(query, args);
string connString = Backend.ConnectionString;
Exception e = null;
for (int i = 0; i < 10; i++) {
try {
if (results == null) {
if (callback != null) {
query.ExecuteReader(sql, connString, callback);
} else if (results == null) {
query.Execute(sql, connString, createDB);
} else {
query.Fill(sql, connString, results);
}
query.ClearParams();
return;
} catch (Exception ex) {

View File

@@ -22,7 +22,7 @@ using System.Data.Common
namespace MCGalaxy.SQL {
public delegate void ReaderCallback(DataTable schema, IDataReader reader);
public delegate void ReaderCallback(IDataReader reader);
public abstract class ParameterisedQuery {
@@ -77,7 +77,7 @@ namespace MCGalaxy.SQL {
}
}
public void ExecuteReader(string query, string connString) {
public void ExecuteReader(string query, string connString, ReaderCallback callback) {
using (IDbConnection conn = CreateConnection(connString)) {
conn.Open();
if (MultipleSchema)
@@ -86,9 +86,7 @@ namespace MCGalaxy.SQL {
using (IDbCommand cmd = CreateCommand(query, conn)) {
FillParams(cmd);
using (IDataReader reader = cmd.ExecuteReader()) {
while (reader.Read()) {
// TODO: do callback here
}
while (reader.Read()) { callback(reader); }
}
}
conn.Close();
@@ -102,7 +100,6 @@ namespace MCGalaxy.SQL {
dParam.Value = param.Value;
cmd.Parameters.Add(dParam);
}
}
}
}
}
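A minimal caller of the new streaming path might look like this (the table and column names are only placeholders); the delegate runs once per row, so nothing is buffered into a DataTable:

// Sketch of a caller of the new API; "Players"/"Name" are placeholder names.
List<string> names = new List<string>();
Database.ExecuteReader("SELECT Name FROM Players",
    reader => names.Add(reader.GetString(0)));

Previously the only read path was Database.Fill(), which copies every row into an in-memory DataTable before the caller sees any of them; streaming avoids that copy, which is presumably where the faster table backup comes from.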

View File

@@ -595,8 +595,9 @@
<Compile Include="Database\Database.cs" />
<Compile Include="Games\CTF\CtfGame.cs" />
<Compile Include="Levels\BlockQueue.cs" />
<Compile Include="Server\Backup.cs" />
<Compile Include="Server\BackupDB.cs" />
<Compile Include="Server\Backup\Backup.cs" />
<Compile Include="Server\Backup\BackupDB.cs" />
<Compile Include="Server\Backup\TableDumper.cs" />
<Compile Include="Server\Extra\UPnP.cs" />
<Compile Include="Network\IBeat.cs" />
<Compile Include="Levels\Level.cs" />
@@ -738,6 +739,7 @@
<Folder Include="CorePlugin" />
<Folder Include="Levels\IO\Importers" />
<Folder Include="Levels\IO\Exporters" />
<Folder Include="Server\Backup" />
<Folder Include="Server\Tasks" />
<Folder Include="util\Math" />
<Folder Include="Player\Group" />

View File

@@ -1,165 +0,0 @@
/*
Copyright 2011 MCForge
Dual-licensed under the Educational Community License, Version 2.0 and
the GNU General Public License, Version 3 (the "Licenses"); you may
not use this file except in compliance with the Licenses. You may
obtain a copy of the Licenses at
http://www.opensource.org/licenses/ecl2.php
http://www.gnu.org/licenses/gpl-3.0.html
Unless required by applicable law or agreed to in writing,
software distributed under the Licenses are distributed on an "AS IS"
BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
or implied. See the Licenses for the specific language governing
permissions and limitations under the Licenses.
*/
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Packaging;
namespace MCGalaxy {
public static partial class Backup {
const string path = "MCGalaxy.zip";
public class BackupArgs {
public Player p;
public bool Files, Database, Lite;
}
public static void CreatePackage(Player p, bool files, bool db, bool lite) {
if (db) {
Server.s.Log("Backing up the database...");
using (StreamWriter sql = new StreamWriter("SQL.sql"))
BackupDatabase(sql,lite);
Server.s.Log("Backed up the database to SQL.sql");
}
List<Uri> filesList = null;
if (files) {
Server.s.Log("Determining which files to backup...");
string dir = Directory.GetCurrentDirectory() + "\\";
filesList = GetAllFiles(new DirectoryInfo("./"), new Uri(dir), lite);
Server.s.Log("Finished determining included files");
}
Server.s.Log("Creating compressed backup...");
using (ZipPackage package = (ZipPackage)ZipPackage.Open(path, FileMode.Create)) {
if (files) {
Server.s.Log("Compressing files...");
SaveFiles(package, filesList);
}
if (db) SaveDatabase(package);
Server.s.Log("Compressed all data!");
}
Player.Message(p, "Backup of (" + (files ? "everything" + (db ? "" : " but database") : "database") + ") complete!");
Server.s.Log("Server backed up!");
}
const string undo1 = "extra/undo/", undo2 = @"extra\undo\";
const string prev1 = "extra/undoPrevious/", prev2 = @"extra\undoPrevious\";
const string levelBackup1 = "levels/backups/", levelBackup2 = @"levels\backups\";
const string levelPrev1 = "levels/prev/", levelPrev2 = @"levels\prev\";
const string blockDB1 = "blockdb/", blockDB2 = @"blockdb\";
static List<Uri> GetAllFiles(DirectoryInfo dir, Uri baseUri, bool lite) {
List<Uri> list = new List<Uri>();
foreach (FileSystemInfo entry in dir.GetFileSystemInfos()) {
if (entry is FileInfo) {
string path = ((FileInfo)entry).FullName;
if (lite && (path.Contains(undo1) || path.Contains(undo2))) continue;
if (lite && (path.Contains(prev1) || path.Contains(prev2))) continue;
if (lite && (path.Contains(levelBackup1) || path.Contains(levelBackup2))) continue;
if (lite && (path.Contains(levelPrev1) || path.Contains(levelPrev2))) continue;
if (lite && (path.Contains(blockDB1) || path.Contains(blockDB2))) continue;
// Make a relative URI
Uri uri = baseUri.MakeRelativeUri(new Uri(path));
if (uri.ToString().IndexOfAny("/\\".ToCharArray()) > 0)
list.Add(PackUriHelper.CreatePartUri(uri));
} else {
list.AddRange(GetAllFiles((DirectoryInfo)entry, baseUri, lite));
}
}
return list;
}
static void SaveFiles(ZipPackage package, List<Uri> partURIs) {
foreach (Uri loc in partURIs) {
string file = Uri.UnescapeDataString(loc.ToString());
if (file.Contains(path)) continue;
try {
PackagePart part = package.CreatePart(loc, "");
using (Stream src = new FileStream("./" + file, FileMode.Open, FileAccess.Read))
CopyStream(src, part.GetStream());
} catch (Exception ex) {
Server.s.Log("Failed to save file: " + file);
Server.ErrorLog(ex);
}
}
}
static void SaveDatabase(ZipPackage package) {
Server.s.Log("Compressing Database...");
Uri uri = new Uri("/SQL.sql", UriKind.Relative);
PackagePart part = package.CreatePart(uri, "", CompressionOption.Normal);
CopyStream(File.OpenRead("SQL.sql"), part.GetStream());
Server.s.Log("Database compressed");
}
static void CopyStream(Stream source, Stream target) {
const int bufSize = 0x1000;
byte[] buf = new byte[bufSize];
int bytesRead = 0;
while ((bytesRead = source.Read(buf, 0, bufSize)) > 0)
target.Write(buf, 0, bytesRead);
}
public static void ExtractPackage(Player p) {
int errors = 0;
using (FileStream src = File.OpenRead(path))
using (ZipPackage zip = (ZipPackage)ZipPackage.Open(src))
{
PackagePartCollection parts = zip.GetParts();
foreach (ZipPackagePart item in parts) {
ExtractItem(item, ref errors);
if (item.Uri.ToString().ToLower().Contains("sql.sql")) {
// If it's in there, they backed it up, meaning they want it restored
Backup.ReplaceDatabase(item.GetStream());
}
}
}
// To make life easier, we reload settings now, to make it less likely to need a restart
Command.all.Find("server").Use(null, "reload"); // Reload, as console
Player.Message(p, "Server restored" + (errors > 0 ? " with errors. May be a partial restore" : "") + ". Restart is reccommended, though not required.");
}
static void ExtractItem(ZipPackagePart item, ref int errors) {
string entry = item.Uri.ToString();
string file = "./" + Uri.UnescapeDataString(entry);
using (Stream src = item.GetStream()) {
try {
using (Stream dst = File.Create(file))
CopyStream(src, dst);
} catch {
try {
Directory.CreateDirectory("./" + entry.Substring(0, entry.LastIndexOfAny("\\/".ToCharArray())));
using (Stream dst = File.Create(file))
CopyStream(src, dst);
} catch (IOException e) {
Server.ErrorLog(e);
Server.s.Log("Caught ignoreable Error. See log for more details. Will continue with rest of files.");
errors++;
}
}
}
}
}
}

View File

@@ -1,234 +0,0 @@
/*
Copyright 2011 MCForge
Dual-licensed under the Educational Community License, Version 2.0 and
the GNU General Public License, Version 3 (the "Licenses"); you may
not use this file except in compliance with the Licenses. You may
obtain a copy of the Licenses at
http://www.opensource.org/licenses/ecl2.php
http://www.gnu.org/licenses/gpl-3.0.html
Unless required by applicable law or agreed to in writing,
software distributed under the Licenses are distributed on an "AS IS"
BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
or implied. See the Licenses for the specific language governing
permissions and limitations under the Licenses.
*/
using System;
using System.Collections.Generic;
using System.Data;
using System.IO;
using MCGalaxy.SQL;
namespace MCGalaxy {
public static partial class Backup {
public static void BackupDatabase(StreamWriter sql, bool lite) {
//We technically know all tables in the DB, but since this is MySQL we can also list them all with a MySQL command,
//so we query the tables and store the result.
//Also output header information (same format as phpMyAdmin's dump).
//Important note: this does NOT account for foreign keys, BLOBs, etc. It only works for what we actually put in the db.
sql.WriteLine("-- MCGalaxy SQL Database Dump");
sql.WriteLine("-- version 1.5");
sql.WriteLine("-- http://mcgalaxy.ml");
sql.WriteLine("--");
sql.WriteLine("-- Host: {0}", Server.MySQLHost);
sql.WriteLine("-- Generation Time: {0:d} at {0:HH:mm:ss}", DateTime.Now, DateTime.Now);
sql.WriteLine("-- MCGalaxy Version: {0}", Server.Version);
sql.WriteLine();
sql.WriteLine();
List<string> sqlTables = Database.Backend.AllTables();
foreach (string name in sqlTables) {
if (lite && name.CaselessStarts("Block")) continue;
BackupTable(name, sql);
}
}
public static void BackupTable(string tableName, StreamWriter sql) {
//For each table, we iterate through all rows and save them
sql.WriteLine();
sql.WriteLine("-- --------------------------------------------------------");
sql.WriteLine("-- Table structure for table `{0}`", tableName);
sql.WriteLine();
WriteTableSchema(tableName, sql);
using (DataTable data = Database.Backend.GetRows(tableName, "*")) {
if (data.Rows.Count == 0) {
sql.WriteLine("-- No data in table `{0}`!", tableName);
sql.WriteLine();
return;
}
sql.WriteLine("--");
sql.WriteLine("-- Dumping data for table `{0}`", tableName);
sql.WriteLine("--");
sql.WriteLine();
List<DataColumn> allCols = new List<DataColumn>();
foreach (DataColumn col in data.Columns) {
allCols.Add(col);
}
string insertCols = FormatInsertColumns(allCols, tableName);
foreach (DataRow row in data.Rows) { //We rely on the correct datatype being given here.
sql.WriteLine();
sql.WriteLine(insertCols);
sql.Write("(");
for (int col = 0; col < data.Columns.Count; col++) {
//The values themselves can be integers or strings, or null
Type type = allCols[col].DataType;
if (row.IsNull(col)) {
sql.Write("NULL");
} else if (type == typeof(DateTime)) { // special format
sql.Write("'{0:yyyy-MM-dd HH:mm:ss.ffff}'", (DateTime)row[col]);
} else if (type == typeof(bool)) {
sql.Write((bool)row[col] ? "1" : "0");
} else if (type == typeof(string)) {
string value = row[col].ToString();
if (value.IndexOf('\'') >= 0) // escape '
value = value.Replace("'", "''");
sql.Write("'{0}'", value);
} else {
sql.Write(row[col]); // We assume all other data is left as-is
//This includes numbers, blobs, etc. (As well as objects, but we don't save them into the database)
}
sql.Write((col < row.ItemArray.Length - 1 ? ", " : ");"));
}
}
sql.WriteLine();
}
}
static string FormatInsertColumns(List<DataColumn> cols, string name) {
string sql = "INSERT INTO `" + name + "` (`";
for (int i = 0; i < cols.Count; i++) {
sql += cols[i].ColumnName + "`";
if (i < cols.Count - 1) sql += ", `";
else sql += ") VALUES";
}
return sql;
}
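For a hypothetical table `Players` with columns `ID` and `Name`, the row loop above together with FormatInsertColumns emits one header plus one value tuple per row, e.g.:

INSERT INTO `Players` (`ID`, `Name`) VALUES
(1, 'Bob');

Strings are quoted with embedded single quotes doubled, booleans become 1/0, NULLs are written literally, and other values (numbers, blobs) are written as-is.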
static void WriteTableSchema(string tableName, StreamWriter sql) {
if (Server.useMySQL) {
string pri = "";
sql.WriteLine("CREATE TABLE IF NOT EXISTS `{0}` (", tableName);
using (DataTable schema = Database.Fill("DESCRIBE `" + tableName + "`")) {
string[] rowParams = new string[schema.Columns.Count];
foreach (DataRow row in schema.Rows) {
//Save the info contained to file
List<string> tmp = new List<string>();
for (int col = 0; col < schema.Columns.Count; col++)
tmp.Add(row[col].ToString());
rowParams = tmp.ToArray();
rowParams[2] = (rowParams[2].CaselessEq("no") ? "NOT " : "DEFAULT ") + "NULL";
pri += (rowParams[3].CaselessEq("pri") ? rowParams[0] + ";" : "");
sql.WriteLine("`{0}` {1} {2}" + (rowParams[5].Equals("") ? "" : " {5}") + (pri.Equals("") && row == schema.Rows[schema.Rows.Count - 1] ? "" : ","), rowParams);
}
}
if (pri != "") {
string[] tmp = pri.Substring(0, pri.Length - 1).Split(';');
sql.Write("PRIMARY KEY (`");
foreach (string prim in tmp) {
sql.Write(prim);
sql.Write("`" + (tmp[tmp.Length - 1].Equals(prim) ? ")" : ", `"));
}
}
sql.WriteLine(");");
} else {
using (DataTable tableSQL = Database.Fill("SELECT sql FROM sqlite_master" +
" WHERE tbl_name LIKE '" + tableName + "'" +
" AND type = 'table' AND name NOT LIKE 'sqlite_%'" +
" ORDER BY substr(type,2,1), name"))
{
//just print out the data in the table.
foreach (DataRow row in tableSQL.Rows) {
string tableSQLString = row[0].ToString();
sql.WriteLine(tableSQLString.Replace(" " + tableName, " `" + tableName + "`").Replace("CREATE TABLE `" + tableName + "`", "CREATE TABLE IF NOT EXISTS `" + tableName + "`") + ";");
//We parse this ourselves to find the actual types.
}
}
}
sql.WriteLine();
}
internal static void ReplaceDatabase(Stream sql) {
using (FileStream backup = File.Create("backup.sql"))
BackupDatabase(new StreamWriter(backup), false); // backup
List<string> tables = Database.Backend.AllTables();
foreach (string table in tables)
Database.Backend.DeleteTable(table); // drop all tables
ImportSql(sql);
}
internal static void ImportSql(Stream sql) {
// Import data (we only have CREATE TABLE and INSERT INTO statements)
using (StreamReader reader = new StreamReader(sql))
using (BulkTransaction helper = BulkTransaction.Create())
{
List<string> buffer = new List<string>();
while (!reader.EndOfStream) {
string cmd = NextStatement(reader, buffer);
if (cmd == null || cmd.Length == 0) continue;
int index = cmd.ToUpper().IndexOf("CREATE TABLE");
if (index > -1) ParseCreate(ref cmd, index);
//Run the command in the transaction.
helper.Execute(cmd);
}
helper.Commit();
}
}
static string NextStatement(StreamReader reader, List<string> buffer) {
buffer.Clear();
string line = null;
while ((line = reader.ReadLine()) != null) {
if (line.StartsWith("--")) continue; // comment
line = line.Trim();
if (line.Length == 0) continue; // whitespace
buffer.Add(line);
if (line[line.Length - 1] == ';') break;
}
return buffer.Join("");
}
static void ParseCreate(ref string cmd, int index) {
cmd = cmd.Remove(0, index);
cmd = cmd.Replace(" unsigned", " UNSIGNED");
if (!Server.useMySQL) return;
// MySQL does not support the format used by the SQLite backend for the primary key
const string priKey = " PRIMARY KEY AUTOINCREMENT";
int priIndex = cmd.ToUpper().IndexOf(priKey);
if (priIndex == -1) return;
// Find the name of this column
char[] sepChars = { '\t', ' ' }; // chars that separate part of a column definition
char[] startChars = { '`', '(', ' ', ',', '\t' }; // chars that can start a column definition
string before = cmd.Substring(0, priIndex);
before = before.Substring(0, before.LastIndexOfAny(sepChars)); // get rid of column type
int nameStart = before.LastIndexOfAny(startChars) + 1;
string name = before.Substring(nameStart);
// Replace the 'PRIMARY KEY AUTOINCREMENT' with just 'AUTO_INCREMENT';
cmd = cmd.Remove(priIndex, priKey.Length);
cmd = cmd.Insert(priIndex, " AUTO_INCREMENT");
// Insert 'PRIMARY KEY' at end of columns definition
cmd = cmd.Insert(cmd.LastIndexOf(")"), ", PRIMARY KEY (`" + name + "`)");
}
}
}
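To make the ParseCreate rewrite above concrete: for a made-up SQLite-style statement like

CREATE TABLE Portals (ID INTEGER PRIMARY KEY AUTOINCREMENT, EntryX INT unsigned);

the method, when importing into MySQL, produces

CREATE TABLE Portals (ID INTEGER AUTO_INCREMENT, EntryX INT UNSIGNED, PRIMARY KEY (`ID`));

i.e. the inline PRIMARY KEY AUTOINCREMENT is replaced with AUTO_INCREMENT, the extracted column name is re-added as a trailing PRIMARY KEY clause, and " unsigned" is upper-cased on the way through.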