import subprocess
import os
import shutil
import oss2
import logging # import the logging module
from datetime import datetime
import config # import the project configuration file
# Configure logging: timestamped INFO-level entries appended to backup.log.
logging.basicConfig(filename="backup.log", level=logging.INFO, format="%(asctime)s - %(levelname)s: %(message)s")
def create_backup_folder(folder_path):
    """
    Create the backup folder (including parents) if it does not exist.

    Args:
        folder_path (str): Path of the backup folder to create.
    """
    # exist_ok avoids the check-then-create race of os.path.exists + makedirs.
    os.makedirs(folder_path, exist_ok=True)
def run_mysqldump(database_name, output_file):
    """
    Dump a MySQL database to a file using mysqldump.

    Args:
        database_name (str): Name of the database to back up.
        output_file (str): Path where the SQL dump is written.

    Raises:
        subprocess.CalledProcessError: If mysqldump exits with a non-zero
            status (previously a failed dump was silently ignored and a
            broken backup would still be compressed/uploaded).
    """
    # Build an argument list (shell=False) so credentials and the database
    # name are never interpreted by a shell; write stdout straight to the
    # dump file instead of relying on shell `>` redirection.
    command = [
        "/usr/local/mysql-5.7.31-linux-glibc2.12-x86_64/bin/mysqldump",
        f"-h{config.DB_HOST}",
        f"-P{config.DB_PORT}",
        f"-u{config.DB_USER}",
        # NOTE(review): the password is still visible in the process list;
        # a MySQL option/defaults file would be safer — confirm with ops.
        f"-p{config.DB_PASSWORD}",
        "--flush-logs",
        "--single-transaction",
        "--set-gtid-purged=OFF",
        database_name,
    ]
    with open(output_file, "w") as out:
        subprocess.run(command, stdout=out, check=True)
def compress_backup_folder(folder_path, password, folder_name):
    """
    Compress a backup folder into a password-protected ZIP archive and
    optionally upload it to OSS.

    The source folder is deleted after compression (zip's -rm flag). The
    archive is uploaded when config.OSS_BACKUP == "true".

    Args:
        folder_path (str): Parent directory that contains the backup folder.
        password (str): Password used to encrypt the ZIP archive.
        folder_name (str): Name of the backup folder to compress.

    Raises:
        subprocess.CalledProcessError: If zip exits with a non-zero status.
    """
    # Argument list with shell=False so the password and folder name are
    # never interpreted by a shell; cwd keeps paths inside the archive
    # relative to the backup root.
    command = ["zip", "--password", password, "-rm", f"{folder_name}.zip", folder_name]
    subprocess.run(command, cwd=folder_path, check=True)
    file_path = os.path.join(folder_path, f"{folder_name}.zip")
    # Destination bucket for the archive.
    bucket_name = config.OSS_BUCKET_NAME
    # Object key, i.e. the archive's path inside the OSS bucket.
    object_name = f"backup_folder/{folder_name}.zip"
    if config.OSS_BACKUP == "true":
        upload_to_oss(file_path, bucket_name, object_name)
def delete_old_backups(folder_path, num_to_keep):
    """
    Delete the oldest backup archives, keeping only the most recent ones.

    Archive names start with a YYYY-mm-dd-HH-MM-SS timestamp, so
    lexicographic order equals chronological order.

    Args:
        folder_path (str): Directory containing the backup archives.
        num_to_keep (int): Number of most recent backups to retain.
    """
    # Only consider regular files: os.remove would raise on a leftover
    # directory, and subdirectories are not backup archives.
    backup_files = sorted(
        name
        for name in os.listdir(folder_path)
        if os.path.isfile(os.path.join(folder_path, name))
    )
    # Use an explicit end index: the previous `[:-num_to_keep]` slice
    # silently deleted nothing when num_to_keep == 0.
    excess = len(backup_files) - num_to_keep
    for file_to_delete in backup_files[: max(excess, 0)]:
        os.remove(os.path.join(folder_path, file_to_delete))
def upload_to_oss(file_path, bucket_name, object_name):
    """
    Upload a local file to an Aliyun OSS bucket.

    Args:
        file_path (str): Local path of the file to upload.
        bucket_name (str): Name of the destination OSS bucket.
        object_name (str): Object key (path) for the file inside the bucket.
    """
    # Aliyun access credentials and endpoint come from the config module.
    access_key_id = config.OSS_KEY_ID
    access_key_secret = config.OSS_SECRET
    endpoint = config.OSS_ENDPOINT
    # NOTE: the original re-assigned bucket_name from config here, silently
    # ignoring the caller-supplied bucket; the parameter is now honored.
    auth = oss2.Auth(access_key_id, access_key_secret)
    bucket = oss2.Bucket(auth, endpoint, bucket_name)
    bucket.put_object_from_file(object_name, file_path)
    logging.info(f"File {object_name} uploaded to OSS successfully.")
def main():
    """
    Run one full backup cycle: dump the database into a timestamped folder,
    compress it into a password-protected archive (optionally uploading it
    to OSS), then prune old archives.
    """
    # Timestamped folder name, e.g. 2024-01-31-23-59-59.
    folder_name = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
    backup_folder = os.path.join(config.BACKUP_FOLDER, folder_name)
    create_backup_folder(backup_folder)
    print(f"Backup folder created: {backup_folder}")
    output_file = os.path.join(
        backup_folder,
        f"{config.DB_NAME}_{datetime.now().strftime('%Y%m%d')}.sql",
    )
    run_mysqldump(config.DB_NAME, output_file)
    print(f"Backup for {config.DB_NAME} completed")
    # compress_backup_folder runs zip with cwd=folder_path, so the previous
    # process-wide os.chdir side effect is unnecessary and has been removed.
    compress_backup_folder(config.BACKUP_FOLDER, config.ZIP_PASSWORD, folder_name)
    # Keep only the most recent NUM_BACKUPS_TO_KEEP archives.
    delete_old_backups(config.BACKUP_FOLDER, config.NUM_BACKUPS_TO_KEEP)
if __name__ == "__main__":
main()