Commit 0e61267c, authored by 陈泽健

feat(deploy): refactor the update script and add full backup support

- Rename the script file to pull_remote_devlop.py
- Flesh out the backup logic for the frontend and the backend
- Streamline the update flow with added error handling and logging
- Improve the frontend update approach: back up as a whole, then update file by file
- Add backup of the backend JAR files
- Fix several feature modules and parameter settings
Parent 87b429a0
# Package and upload the booking system's internal services to the server (running this also triggers upload_saveto_pan.ps1)
# Internal tool: builds and updates by calling build_local.sh
$Port = 22
$RemoteUser = "root"
# Variable assignments; adjust when switching servers
# 5.186
# $RemoteIP = "139.159.163.86"
# $RemotePath_inner = "/var/www/java/api-java-meeting2.0"
# $RemotePath_external = "/var/www/java/external-meeting-api"
#
# $RunCommand_inner = "docker exec ujava2 /var/www/java/api-java-meeting2.0/run.sh;"
# $RunCommand_external = "docker exec ujava2 /var/www/java/external-meeting-api/run.sh;"
# 5.218
# $RemoteIP = "139.159.163.86"
# $RemotePath_inner = "/var/www/java/api-java-meeting2.0"
# $RemotePath_external = "/var/www/java/external-meeting-api"
#
# $RunCommand_inner = "docker exec ujava2 /var/www/java/api-java-meeting2.0/run.sh;"
# $RunCommand_external = "docker exec ujava2 /var/www/java/external-meeting-api/run.sh;"
# Lanzhou Sinopec project
$RemoteIP = "139.159.163.86"
$RemotePath_inner = "/var/www/java/api-java-meeting2.0"
$RemotePath_external = "/var/www/java/external-meeting-api"
$RunCommand_inner = "docker exec ujava2 /var/www/java/api-java-meeting2.0/run.sh;"
$RunCommand_external = "docker exec ujava2 /var/www/java/external-meeting-api/run.sh;"
# scp: upload the local artifact to the execution directory on the remote server (internal booking service path)
scp -P $Port "\\192.168.9.9\deploy\01会议预定\标准版本-长期运维\01版本管理\01后端运行服务\内部预定\COM_虹软4.0_V2.1.2526.586_2025_06_25_psl自测\ubains-meeting-inner-api-1.0-SNAPSHOT.jar" "${RemoteUser}@${RemoteIP}:${RemotePath_inner}"
# scp: upload the local artifact to the execution directory on the remote server (external booking service path)
scp -P $Port "\\192.168.9.9\deploy\01会议预定\标准版本-长期运维\01版本管理\01后端运行服务\对外预定\COMVhx2.0.2526.234_2025_06_25_dhh自测\ubains-meeting-api-1.0-SNAPSHOT.jar" "${RemoteUser}@${RemoteIP}:${RemotePath_external}"
# ssh: run the update scripts on the remote server
# Update the internal booking service
ssh -p $Port "${RemoteUser}@${RemoteIP}" "${RunCommand_inner}"
# Update the external booking service
ssh -p $Port "${RemoteUser}@${RemoteIP}" "${RunCommand_external}"
# Invoke the upload_saveto_pan.ps1 script
# Write-Host "Invoking the upload_saveto_pan.ps1 script"
# $uploadScriptPath = ".\upload_saveto_pan.ps1"
# & $uploadScriptPath
\ No newline at end of file
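# A minimal hedged sketch (not part of the script above) of how the scp/ssh steps could be
# guarded with exit-code checks before restarting the service; $LASTEXITCODE is standard
# PowerShell, and $LocalJar is a hypothetical placeholder for the literal UNC path used above:
# scp -P $Port $LocalJar "${RemoteUser}@${RemoteIP}:${RemotePath_inner}"
# if ($LASTEXITCODE -ne 0) {
#     Write-Host "scp upload failed; aborting before the service restart"
#     exit 1
# }
# ssh -p $Port "${RemoteUser}@${RemoteIP}" "${RunCommand_inner}"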
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
预定系统全栈更新脚本 - 完整稳定
功能亮点
1. 稳定可靠的文件传输机制
2. 完善的错误处理和日志记录
3. 固定名称的压缩包管理
4. 严格的目录和文件验证
预定系统全栈更新脚本 - 终极完整
包含
1. 完整的前端备份功能(使用mv命令)
2. 完整的后端更新功能
3. 完整的异常处理流程
4. 完整的日志记录系统
"""
import paramiko
......@@ -22,23 +22,24 @@ import stat
class Config:
"""系统配置类,包含所有更新配置参数"""
"""完整的系统配置类"""
# ===== 连接配置 =====
SSH_PORT = 22 # SSH端口
SSH_USER = "root" # SSH用户名
SSH_PASSWORD = os.getenv('DEPLOY_SSH_PASSWORD') or "hzpassw0RD@KP" # 从环境变量获取密码
SSH_PORT = 22
SSH_USER = "root"
SSH_PASSWORD = os.getenv('DEPLOY_SSH_PASSWORD') or "hzpassw0RD@KP"
# ===== Server settings =====
SERVER_HOST = "139.159.163.86" # Server IP
SERVER_HOST = '139.159.163.86' # Defaults to the Lanzhou Sinopec test environment
# ===== Backend service settings =====
BACKEND_PATHS = {
'inner': { # Internal service
'inner': {
'remote': "/var/www/java/api-java-meeting2.0/",
'local': r"\\192.168.9.9\deploy\01会议预定\标准版本-长期运维\01版本管理\01后端运行服务\内部预定\COM_虹软4.0_V2.1.2526.586_2025_06_25_psl自测\ubains-meeting-inner-api-1.0-SNAPSHOT.jar",
'command': "docker exec ujava2 /var/www/java/api-java-meeting2.0/run.sh"
},
'external': { # External service
'external': {
'remote': "/var/www/java/external-meeting-api/",
'local': r"\\192.168.9.9\deploy\01会议预定\标准版本-长期运维\01版本管理\01后端运行服务\对外预定\COMVhx2.0.2526.234_2025_06_25_dhh自测\ubains-meeting-api-1.0-SNAPSHOT.jar",
'command': "docker exec ujava2 /var/www/java/external-meeting-api/run.sh"
......@@ -47,14 +48,40 @@ class Config:
# ===== Frontend service settings =====
FRONTEND_CONFIG = {
'remote_dir': "/var/www/java/ubains-web-2.0/",
'local_dir': r"\\192.168.9.9\deploy\01会议预定\标准版本-长期运维\01版本管理\02前端PC网页\标准版本-长期运维\2025年度\2.0.2525.1143 2025-06-23-LPH-自测",
'backup_script': "/var/www/java/ubains-web-2.0/bakup.sh",
'files_to_update': [
'*.worker.js',
'index.html',
'static/'
]
'front': {
'remote_dir': "/var/www/java/ubains-web-2.0/",
'local_dir': r"\\192.168.9.9\deploy\00项目管理\2025\L 兰州中石化项目\01版本管理\02前端PC网页\2.0.2526.1148 2025-06-26",
'files_to_update': ['*.worker.js', 'index.html', 'static/']
},
'admin': {
'remote_dir': "/var/www/java/ubains-web-admin/",
'local_dir': r"\\192.168.9.9\deploy\01会议预定\标准版本-预定后台\01版本管理\2.0.2526.1109 2025-06-25",
'files_to_update': ['index.html', 'static/']
}
}
# ===== Backup settings =====
BACKUP_CONFIG = {
'front': {
'remote_dir': "/var/www/java/ubains-web-2.0/",
'backup_dir': "/var/www/java/ubains-web-2.0/backup/",
'files_to_backup': ['index.html', 'static', '*.worker.js']
},
'admin': {
'remote_dir': "/var/www/java/ubains-web-admin/",
'backup_dir': "/var/www/java/ubains-web-admin/backup/",
'files_to_backup': ['index.html', 'static']
},
'inner': {
'remote_dir': "/var/www/java/api-java-meeting2.0/",
'backup_dir': "/var/www/java/api-java-meeting2.0/backup/",
'files_to_backup': ['ubains-meeting-inner-api-1.0-SNAPSHOT.jar']
},
'external': {
'remote_dir': "/var/www/java/external-meeting-api/",
'backup_dir': "/var/www/java/external-meeting-api/backup/",
'files_to_backup': ['ubains-meeting-api-1.0-SNAPSHOT.jar']
}
}
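# Illustration (comment added for clarity, not in the original): with these settings a frontend
# backup run is expected to leave a timestamped folder under the configured backup_dir, e.g.
#   /var/www/java/ubains-web-2.0/backup/bak_<YYYYMMDD_HHMMSS>/{config.json, index.html, static/, *.worker.js}
# where the folder name comes from datetime.now().strftime("%Y%m%d_%H%M%S") in deploy_frontend().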
# ===== Log filtering rules =====
......@@ -72,10 +99,9 @@ class Config:
class Deployer:
"""更新器主类,负责整个更新流程"""
"""完整的更新器实现"""
def __init__(self):
"""初始化更新器"""
self.ssh = paramiko.SSHClient()
self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.logger = self._setup_logger()
......@@ -83,14 +109,13 @@ class Deployer:
self.remote_archive_name = "frontend_update.tar.gz"
def _setup_logger(self):
"""配置日志系统"""
"""完整的日志系统初始化"""
def log(msg, level="INFO", important=False):
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
log_msg = f"[{timestamp}] [{level}] {msg}"
if level in ("WARNING", "ERROR") or important:
print(f"\033[1m{log_msg}\033[0m") # 加粗显示
print(f"\033[1m{log_msg}\033[0m")
if level == "ERROR":
sys.stderr.write(log_msg + "\n")
else:
......@@ -99,7 +124,7 @@ class Deployer:
return log
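# Usage note (comment added for clarity, not in the original): the closure returned above is
# stored as self.logger and called as self.logger("message"), self.logger("problem", "ERROR"),
# or self.logger("milestone", important=True); WARNING/ERROR and important messages print in bold.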
def connect(self):
"""建立SSH连接"""
"""完整的连接方法"""
max_retries = 3
for attempt in range(1, max_retries + 1):
try:
......@@ -113,7 +138,13 @@ class Deployer:
banner_timeout=200
)
self.sftp = self.ssh.open_sftp()
self._ensure_remote_dir_exists(Config.FRONTEND_CONFIG['remote_dir'])
# Make sure all required directories exist
for service in ['front', 'admin']:
config = Config.BACKUP_CONFIG[service]
self._ensure_remote_dir_exists(config['backup_dir'])
self._ensure_remote_dir_exists(config['remote_dir'])
self.logger("服务器连接成功", important=True)
return True
except Exception as e:
......@@ -123,69 +154,116 @@ class Deployer:
return False
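# Note (comment added for clarity, not in the original): connect() retries up to max_retries
# times with the SSH settings from Config and returns a boolean, so the deploy() flow below can
# simply bail out early when it gets False instead of raising.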
def _ensure_remote_dir_exists(self, remote_dir):
"""确保远程目录存在"""
"""确保远程目录存在(完整实现)"""
try:
self.sftp.stat(remote_dir)
except FileNotFoundError:
self.logger(f"创建远程目录: {remote_dir}", "INFO")
self.sftp.mkdir(remote_dir)
try:
self.sftp.mkdir(remote_dir)
self.logger(f"已创建远程目录: {remote_dir}", "INFO")
except Exception as e:
raise Exception(f"创建目录失败: {remote_dir} - {str(e)}")
except Exception as e:
raise Exception(f"无法访问远程目录: {str(e)}")
raise Exception(f"目录检查失败: {remote_dir} - {str(e)}")
def upload_file(self, local_path, remote_path):
"""安全上传文件"""
def _remote_file_exists(self, remote_path):
"""检查远程文件是否存在(完整实现)"""
try:
# 验证本地文件
if not os.path.exists(local_path):
raise FileNotFoundError(f"本地文件不存在: {local_path}")
file_size = os.path.getsize(local_path)
self.logger(f"准备上传: {os.path.basename(local_path)} ({file_size / 1024 / 1024:.2f}MB)", important=True)
# 带进度条上传
with tqdm(total=file_size, unit='B', unit_scale=True, desc="上传进度") as pbar:
def callback(transferred, total):
pbar.update(transferred - pbar.n)
self.sftp.stat(remote_path)
return True
except:
return False
self.sftp.put(local_path, remote_path, callback=callback)
def _find_worker_files(self, remote_dir):
"""完整的工作线程文件查找"""
try:
stdin, stdout, stderr = self.ssh.exec_command(
f"find {remote_dir} -maxdepth 1 -name '*.worker.js' -type f"
)
files = []
for line in stdout:
line = line.strip()
if line and self._remote_file_exists(line):
files.append(line)
return files
except Exception as e:
self.logger(f"查找worker文件失败: {str(e)}", "ERROR")
return []
# 验证上传结果
remote_size = self.sftp.stat(remote_path).st_size
if remote_size != file_size:
raise Exception(f"文件大小不匹配 (本地: {file_size}, 远程: {remote_size})")
def _execute_backup_move(self, src, dest, is_directory=False):
"""完整的备份移动操作"""
try:
if not self._remote_file_exists(src):
raise FileNotFoundError(f"源文件不存在: {src}")
self.logger("文件上传验证成功", important=True)
cmd = f"mv {src} {dest}"
if not self.run_command(cmd):
raise Exception(f"移动操作失败: {cmd}")
return True
except Exception as e:
self.logger(f"文件上传失败: {str(e)}", "ERROR")
# 清理可能上传失败的部分文件
try:
self.sftp.remove(remote_path)
except:
pass
return False
self.logger(f"备份移动失败: {src} -> {dest} - {str(e)}", "ERROR")
raise
def run_backup_script(self):
"""执行备份脚本"""
backup_cmd = Config.FRONTEND_CONFIG['backup_script']
self.logger(f"执行备份脚本: {backup_cmd}", important=True)
def _create_backup_folder(self, backup_dir):
"""完整的备份文件夹创建"""
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
temp_dir = os.path.join(backup_dir, f"{timestamp}_temp")
final_dir = os.path.join(backup_dir, timestamp)
try:
stdin, stdout, stderr = self.ssh.exec_command(backup_cmd)
exit_status = stdout.channel.recv_exit_status()
self.sftp.mkdir(temp_dir)
return temp_dir, final_dir
except Exception as e:
raise Exception(f"创建备份文件夹失败: {str(e)}")
if exit_status != 0:
error = stderr.read().decode().strip()
raise Exception(f"备份失败: {error}")
def run_frontend_backup(self, frontend_type):
"""完整的前端备份实现"""
config = Config.BACKUP_CONFIG[frontend_type]
self.logger(f"\n===== 开始备份 {frontend_type} 前端 =====", important=True)
output = stdout.read().decode().strip()
if output:
self.logger(f"备份输出: {output}", "INFO")
try:
# 1. Create a timestamped folder name
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
backup_folder_name = f"backup_{timestamp}"
temp_backup_path = os.path.join(config['backup_dir'], backup_folder_name)
# 2. Ensure the backup directory exists
self._ensure_remote_dir_exists(config['backup_dir'])
# 3. Create the temporary backup folder
self.sftp.mkdir(temp_backup_path)
self.logger(f"Created backup folder: {temp_backup_path}", "INFO")
# 4. Back up index.html
index_src = os.path.join(config['remote_dir'], 'index.html')
index_dest = os.path.join(temp_backup_path, 'index.html')
if self._remote_file_exists(index_src):
self._execute_backup_move(index_src, index_dest)
else:
self.logger("未找到index.html文件,跳过备份", "WARNING")
# 5. 备份worker.js(仅前台)
if frontend_type == 'front':
worker_files = self._find_worker_files(config['remote_dir'])
if not worker_files:
self.logger("未找到worker.js文件,跳过备份", "WARNING")
for src in worker_files:
dest = os.path.join(temp_backup_path, os.path.basename(src))
self._execute_backup_move(src, dest)
# 6. Back up the static directory
static_src = os.path.join(config['remote_dir'], 'static')
static_dest = os.path.join(temp_backup_path, 'static')
if self._remote_file_exists(static_src):
self._execute_backup_move(static_src, static_dest, is_directory=True)
else:
self.logger("未找到static目录,跳过备份", "WARNING")
self.logger(f"备份成功: {temp_backup_path}", important=True)
return True
except Exception as e:
self.logger(str(e), "ERROR")
self.logger(f"备份过程失败: {str(e)}", "ERROR")
return False
def run_command(self, command):
......@@ -210,15 +288,62 @@ class Deployer:
self.logger(f"命令执行异常: {str(e)}", "ERROR")
return False
def _should_ignore_log(self, log_line):
"""日志过滤器"""
if not log_line.strip():
def _restore_config_js(self, remote_dir):
"""恢复原config.json文件"""
try:
# 检查备份的config.json是否存在
backup_config = os.path.join(remote_dir, 'backup_config.json')
target_config = os.path.join(remote_dir, 'static', 'config.json')
if self._remote_file_exists(backup_config):
self.logger("正在恢复原config.json文件...", "INFO")
if not self.run_command(f"mv {backup_config} {target_config}"):
raise Exception("恢复config.json失败")
return True
return False
except Exception as e:
self.logger(f"恢复config.json失败: {str(e)}", "ERROR")
return False
def upload_file(self, local_path, remote_path):
"""安全上传文件"""
try:
# 验证本地文件
if not os.path.exists(local_path):
raise FileNotFoundError(f"本地文件不存在: {local_path}")
file_size = os.path.getsize(local_path)
self.logger(f"准备上传: {os.path.basename(local_path)} ({file_size / 1024 / 1024:.2f}MB)", important=True)
# 带进度条上传
with tqdm(total=file_size, unit='B', unit_scale=True, desc="上传进度") as pbar:
def callback(transferred, total):
pbar.update(transferred - pbar.n)
self.sftp.put(local_path, remote_path, callback=callback)
# Verify the upload result
remote_size = self.sftp.stat(remote_path).st_size
if remote_size != file_size:
raise Exception(f"File size mismatch (local: {file_size}, remote: {remote_size})")
self.logger("File upload verified successfully", important=True)
return True
line_lower = log_line.lower()
return any(pattern.search(line_lower) for pattern in Config.IGNORABLE_LOG_PATTERNS)
def prepare_frontend_archive(self, local_dir):
"""Prepare the frontend archive"""
except Exception as e:
self.logger(f"File upload failed: {str(e)}", "ERROR")
# Clean up any partially uploaded file
try:
self.sftp.remove(remote_path)
except:
pass
return False
def prepare_frontend_archive(self, local_dir, files_to_update):
"""准备前端压缩包
:param local_dir: 本地目录路径
:param files_to_update: 需要更新的文件列表
"""
tmp_path = os.path.join(tempfile.gettempdir(), self.remote_archive_name)
try:
......@@ -228,7 +353,7 @@ class Deployer:
# Create the new archive
with tarfile.open(tmp_path, "w:gz") as tar:
for item in Config.FRONTEND_CONFIG['files_to_update']:
for item in files_to_update:
src_path = os.path.join(local_dir, item)
if item.endswith('/'):
......@@ -258,64 +383,80 @@ class Deployer:
os.unlink(tmp_path)
raise
def deploy_frontend(self):
"""更新前端"""
self.logger("\n===== 开始前端更新 =====", important=True)
# 执行备份
if not self.run_backup_script():
self.logger("备份失败但仍继续更新", "WARNING")
def deploy_frontend(self, frontend_type):
"""精确版前端部署流程"""
config = Config.FRONTEND_CONFIG[frontend_type]
backup_config = Config.BACKUP_CONFIG[frontend_type]
self.logger(f"\n===== 开始更新 {frontend_type} 前端 =====", important=True)
tmp_path = None
try:
# Prepare the archive
tmp_path, file_size = self.prepare_frontend_archive(Config.FRONTEND_CONFIG['local_dir'])
remote_path = os.path.join(Config.FRONTEND_CONFIG['remote_dir'], self.remote_archive_name)
# 1. Create the overall backup folder -------------------------------------------
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
backup_dir = os.path.join(backup_config['backup_dir'], f"bak_{timestamp}")
self._ensure_remote_dir_exists(backup_config['backup_dir'])
self.sftp.mkdir(backup_dir)
# 2. Back up config.json directly -----------------------------------------
remote_config = os.path.join(config['remote_dir'], 'static/config.json')
if self._remote_file_exists(remote_config):
self.run_command(f"cp {remote_config} {backup_dir}/config.json")
self.logger(f"config.json backed up to: {backup_dir}/config.json", "INFO")
else:
self.logger("config.json not found, possibly a fresh install", "WARNING")
# 3. Move the frontend files into the backup folder in one go -------------------------------------
backup_cmd = f"""
cd {config['remote_dir']} && \
mv index.html {backup_dir}/ && \
mv static {backup_dir}/ && \
{"mv *.worker.js " + backup_dir + "/ 2>/dev/null || echo" if frontend_type == 'front' else ":"}
"""
self.run_command(backup_cmd)
self.logger(f"整体备份完成: {backup_dir}", important=True)
# 上传文件
if not self.upload_file(tmp_path, remote_path):
return False
# 4. Upload and extract the new version --------------------------------------------
tmp_path = self.prepare_frontend_archive(config['local_dir'], config['files_to_update'])[0]
self.upload_file(tmp_path, os.path.join(config['remote_dir'], self.remote_archive_name))
# Extract the archive
cmd = f"""
cd {Config.FRONTEND_CONFIG['remote_dir']} && \
if [ -f {self.remote_archive_name} ]; then \
tar -xzf {self.remote_archive_name} && \
rm {self.remote_archive_name} && \
echo "Extraction succeeded"; \
else \
echo "Error: archive not found" >&2; \
exit 1; \
fi
"""
if not self.run_command(cmd):
return False
self.run_command(f"""
cd {config['remote_dir']} && \
tar -xzf {self.remote_archive_name} && \
rm {self.remote_archive_name}
""")
# 5. Forcibly restore config.json -----------------------------------------
if self._remote_file_exists(f"{backup_dir}/config.json"):
self.run_command(f"cp -f {backup_dir}/config.json {config['remote_dir']}/static/")
self.logger("config.json forcibly restored", important=True)
self.logger("Frontend update completed", important=True)
self.logger(f"{frontend_type} frontend updated successfully", important=True)
return True
except Exception as e:
self.logger(f"前端更新失败: {str(e)}", "ERROR")
self.logger(f"更新失败: {str(e)}", "ERROR")
return False
finally:
if tmp_path and os.path.exists(tmp_path):
os.unlink(tmp_path)
self.logger(f"已清理临时文件: {tmp_path}", "INFO")
if tmp_path: os.unlink(tmp_path)
def deploy_backend(self, service_type):
"""更新后端服务"""
config = Config.BACKEND_PATHS[service_type] # 修复拼写错误
"""更新后端服务(带备份功能)"""
config = Config.BACKEND_PATHS[service_type]
self.logger(f"\n===== 开始更新 {service_type} 后端服务 =====", important=True)
try:
# Upload the JAR file
# 1. Run the backup
if not self._backup_jar_file(service_type):
raise Exception("Backup failed, aborting the update")
# 2. Upload the new JAR file
remote_jar_path = os.path.join(config['remote'], os.path.basename(config['local']))
if not self.upload_file(config['local'], remote_jar_path):
return False
raise Exception("文件上传失败")
# 重启服务
# 3. 重启服务
if not self.run_command(config['command']):
return False
raise Exception("服务重启失败")
self.logger(f"{service_type}后端更新成功", important=True)
return True
......@@ -338,8 +479,8 @@ class Deployer:
if not self.connect():
return False
# Update the frontend
if not self.deploy_frontend():
# Update the frontends ('front' first, then 'admin')
if not all(self.deploy_frontend(ft) for ft in ['front', 'admin']):
return False
# Update the backend services
......@@ -355,11 +496,45 @@ class Deployer:
finally:
self._cleanup()
def _backup_jar_file(self, service_type):
"""备份当前的JAR文件"""
# 从BACKUP_CONFIG获取备份配置
config = Config.BACKUP_CONFIG[service_type]
self.logger(f"\n===== 开始备份 {service_type} 后端服务 =====", important=True)
try:
# 1. Ensure the backup directory exists
self._ensure_remote_dir_exists(config['backup_dir'])
# 2. Locate the current JAR file
jar_name = config['files_to_backup'][0] # JAR file name from the backup settings
remote_jar = os.path.join(config['remote_dir'], jar_name)
if not self._remote_file_exists(remote_jar):
self.logger(f"未找到需要备份的JAR文件: {remote_jar}", "WARNING")
return True
# 3. Build the backup file name
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
backup_name = f"{os.path.splitext(jar_name)[0]}_{timestamp}.jar"
backup_path = os.path.join(config['backup_dir'], backup_name)
# 4. Perform the backup
if not self.run_command(f"cp {remote_jar} {backup_path}"):
raise Exception(f"备份命令执行失败: {remote_jar} -> {backup_path}")
self.logger(f"备份成功: {backup_path}", important=True)
return True
except Exception as e:
self.logger(f"{service_type}后端备份失败: {str(e)}", "ERROR")
return False
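# Illustration (comment added, not in the original): for the 'inner' service this produces a
# timestamped copy such as
#   /var/www/java/api-java-meeting2.0/backup/ubains-meeting-inner-api-1.0-SNAPSHOT_<YYYYMMDD_HHMMSS>.jar
# before the new JAR overwrites the running one in deploy_backend().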
if __name__ == "__main__":
print("\n=== 预定系统服务更新工具 ===")
if Config.SSH_PASSWORD == "hzpassw0RD@KP":
print("\033[1;31m! 安全警告: 您正在使用默认密码,建议通过环境变量配置!\033[0m")
print("\033[1;31m! 安全警告: 您正在使用默认密码!\033[0m")
deployer = Deployer()
if deployer.deploy():
......
# Upload the booking system's internal service package to the corporate network drive
# Ignore SSL certificate validation (if still needed)
add-type @"
using System.Net;
using System.Security.Cryptography.X509Certificates;
public class TrustAllCertsPolicy : ICertificatePolicy {
public bool CheckValidationResult(
ServicePoint srvPoint, X509Certificate certificate,
WebRequest request, int certificateProblem) {
return true;
}
}
"@
[System.Net.ServicePointManager]::CertificatePolicy = New-Object TrustAllCertsPolicy
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
# Source file paths
$SourceDirectory = ".\..\ubains-meeting-inner-api\target\ubains-meeting-inner-api-1.0-SNAPSHOT.jar"
$ReadmeSourcePath = ".\..\ubains-meeting-inner-api\README.md"
# Target file names
$PackageName = "ubains-meeting-inner-api-1.0-SNAPSHOT.jar"
$PackageNameReadme = "README.md"
# Read packageVersion.txt and use its content as the custom folder name
$testFilePath = ".\..\ubains-meeting-common\src\main\resources\packageVersion.txt"
if (Test-Path -Path $testFilePath) {
$CustomFolder = Get-Content -Path $testFilePath -Raw -Encoding UTF8
Write-Host "Custom folder name: $CustomFolder"
} else {
Write-Host "File $testFilePath not found"
exit 1
}
# Strip illegal characters from the path, keeping letters, digits, Chinese characters, and dots
$CustomFolder = [System.Text.RegularExpressions.Regex]::Replace($CustomFolder, '[^a-zA-Z0-9\u4e00-\u9fa5.]', '_')
# Locally mapped path of the corporate network drive
# $WebDavBasePath = "\\192.168.9.9\研发管理\01会议预定\标准版本-长期运维\01版本管理\01后端运行服务\内部预定"
# TODO used for version 3.1.1.0 - start of configuration
# $WebDavBasePath = "\\192.168.9.9\deploy\01会议预定\标准版本-长期运维\01版本管理\01后端运行服务\内部预定\COM_虹软3.0"
# TODO used for version 3.1.1.0 - end of configuration
# TODO used for version 4.1.1.0 - start of configuration
$WebDavBasePath = "\\192.168.9.9\deploy\01会议预定\标准版本-长期运维\01版本管理\01后端运行服务\内部预定"
# TODO used for version 4.1.1.0 - end of configuration
# Build the full upload paths
$WebDavUrl = Join-Path -Path $WebDavBasePath -ChildPath $CustomFolder
$WebDavFilePath = Join-Path -Path $WebDavUrl -ChildPath $PackageName
$WebDavReadmePath = Join-Path -Path $WebDavBasePath -ChildPath "README.md"
# Temporary path (not actually needed here; the source path is used directly)
$TempPath = $SourceDirectory
# Make sure the custom folder exists
if (-Not (Test-Path -Path $WebDavUrl)) {
New-Item -Path $WebDavUrl -ItemType Directory -Force | Out-Null
Write-Host "创建网盘文件夹: $WebDavUrl"
}
# 将文件复制到公司网盘的本地映射路径
Write-Host "开始上传jar包..."
Copy-Item -Path $TempPath -Destination $WebDavFilePath -Force
Write-Host "jar包上传完成"
# 上传README.md文件
Write-Host "开始上传README.md..."
Copy-Item -Path $ReadmeSourcePath -Destination $WebDavReadmePath -Force
Write-Host "README.md上传完成"
# 打印上传路径
Write-Host "jar文件已上传到: $WebDavFilePath"
Write-Host "README.md已上传到: $WebDavReadmePath"
\ No newline at end of file
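# A hedged sketch (not part of the original script) of a post-copy verification step, reusing
# the Test-Path cmdlet already used above to confirm the jar actually landed on the network drive:
if (-Not (Test-Path -Path $WebDavFilePath)) {
    Write-Host "Upload verification failed: $WebDavFilePath not found"
    exit 1
}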