[?] Ready
This commit is contained in:
parent
e98e5201b2
commit
315b38a150
1
dist_scripts/__init__.py
Normal file
1
dist_scripts/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
from .build_packet import build
|
134
dist_scripts/build_packet.py
Normal file
134
dist_scripts/build_packet.py
Normal file
@ -0,0 +1,134 @@
|
||||
import glob
|
||||
import hashlib
|
||||
import json
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
from .patchers import patch_core_build
|
||||
from .patchers import patch_metadata
|
||||
from .config import *
|
||||
|
||||
# Placeholder; build() overwrites this with the ProductName read from metadata.yml.
product_name = "None"  # automatically taken from metadata
|
||||
|
||||
def get_pyinstaller_cmd():
    """Assemble the PyInstaller invocation as an argument list.

    Returns the command as a list of strings ready for ``subprocess.run``.
    The list is built directly instead of formatting one big string and
    splitting it on spaces, so paths that contain spaces (``path_fix`` is
    an absolute path derived from the checkout location) stay intact.

    Relies on the settings imported from ``.config`` and on the
    module-global ``product_name`` set by ``build()``.
    """
    cmd = [
        'pyinstaller', '--noconfirm', '--onedir', '--console', '--clean',
        '--icon', path_fix + icon,
        '--version-file', path_fix + metadata_path_txt,
        '--name', product_name,
    ]
    # One --add-data pair per bundled resource (src;dest syntax from config).
    for entry in data:
        cmd += ['--add-data', path_fix + entry]
    cmd += [
        '--workpath', workpath,
        '--distpath', distpath,
        '--specpath', specpath,
        '--contents-directory', contents_directory,
        '--optimize', str(optimize),
    ]
    if disable_windowed_traceback:
        cmd.append('--disable-windowed-traceback')
    if admin:
        cmd.append('--uac-admin')
    cmd.append(main)
    logger.info(f"execute: {' '.join(cmd)}")
    return cmd
|
||||
|
||||
def calculate_sha256(file_path):
    """Return the hex SHA-256 digest of the file at *file_path*.

    The file is read in 4 KiB chunks so arbitrarily large files can be
    hashed without loading them fully into memory.
    """
    digest = hashlib.sha256()
    with open(file_path, "rb") as stream:
        while chunk := stream.read(4096):
            digest.update(chunk)
    return digest.hexdigest()
|
||||
|
||||
# Walk a directory tree and fingerprint every file in it.
def generate_file_list_with_sha256(directory):
    """Return {file_path: (sha256_hex, size_in_bytes)} for every file under *directory*."""
    hashes = {}
    for root, _dirs, filenames in os.walk(directory):
        for filename in filenames:
            full_path = os.path.join(root, filename)
            hashes[full_path] = (calculate_sha256(full_path), os.path.getsize(full_path))
    return hashes
|
||||
|
||||
# Compute the delta between two {path: (sha256, size)} snapshots.
def find_differences(old, new):
    """Return {path: (status, sha256)} describing how *new* differs from *old*.

    Status is one of 'deleted' (only in *old*), 'updated' (present in both
    but with different hashes) or 'new_file' (only in *new*).  The sha256
    reported is the one from the snapshot the status refers to.
    """
    diff = {}
    old_paths, new_paths = old.keys(), new.keys()
    for path in old_paths - new_paths:
        diff[path] = ('deleted', old[path][0])
    for path in old_paths & new_paths:
        if old[path][0] != new[path][0]:
            diff[path] = ('updated', old[path][0])
    for path in new_paths - old_paths:
        diff[path] = ('new_file', new[path][0])
    return diff
|
||||
|
||||
def save_sha256(file_list, path: str = sha_file):
    """Serialize *file_list* (the {path: (sha256, size)} mapping) to *path* as indented JSON.

    Defaults to the baseline hash file from config (``sha_file``).
    """
    with open(path, 'w') as out:
        # noinspection PyTypeChecker
        json.dump(file_list, out, indent=4)
|
||||
|
||||
def read_sha256():
    """Load and return the previously saved hash mapping from ``sha_file``."""
    with open(sha_file, 'r') as src:
        return json.load(src)
|
||||
|
||||
def prepare_dist():
    """Clean PyInstaller warning files from the dist, hash the tree, seed the baseline.

    Removes ``warn*.txt`` leftovers, computes fresh hashes for everything
    under ``distpath``, writes them as the baseline if no ``sha_file``
    exists yet (first build), and returns the fresh hash mapping.
    """
    for warn_file in glob.glob(os.path.join(distpath, "warn*.txt")):
        os.remove(warn_file)
    fresh_hashes = generate_file_list_with_sha256(distpath)
    if not sha_file.exists():
        save_sha256(fresh_hashes)
    return fresh_hashes
|
||||
|
||||
def generate_patch(old_ver, new_ver, diff):
    """Copy every new/updated file into a "Patch {old}-{new}" directory.

    NOTE: mutates *diff* in place — each key is rewritten from the absolute
    dist path to a path relative to the app folder, and each value collapses
    from a ``(status, sha256)`` tuple to the bare status string.  The
    rewritten mapping is then saved as the patch manifest (patch.json).
    'deleted' entries are recorded but not copied.

    Returns the path of the created patch directory.
    """
    update_dir = patch_dir.format(old_ver=old_ver, new_ver=new_ver)
    os.makedirs(update_dir, exist_ok=True)

    # Iterate over a copy: the dict is modified inside the loop.
    for file_path, (status, _) in diff.copy().items():
        rel_path = os.path.relpath(file_path, distpath + product_name)  # strip the dist prefix from the path
        del diff[file_path]  # remove the absolute-path entry
        diff.update({rel_path: status})  # re-add the entry under the relative path
        if status == 'deleted':
            continue
        target_path = os.path.join(update_dir, rel_path)  # copy destination
        os.makedirs(os.path.dirname(target_path), exist_ok=True)  # create the folder if it does not exist
        shutil.copy(file_path, target_path)

    save_sha256(diff, patch_file.format(old_ver=old_ver, new_ver=new_ver))
    return update_dir
|
||||
|
||||
def zip_latest(version):
    """Zip the 'latest' directory into ``<dist_dir>/<product_name>-<version>.zip``."""
    archive_base = f"{dist_dir}/{product_name}-{version}"
    shutil.make_archive(archive_base, 'zip', latest_dir)
|
||||
|
||||
def update_latest():
    """Replace the 'latest' directory with a copy of the freshly built app folder."""
    shutil.rmtree(latest_dir)
    build_output = distpath + product_name
    shutil.copytree(build_output, latest_dir)
|
||||
|
||||
def cleanup():
    """Wipe the whole build directory (dist included) and recreate it empty."""
    shutil.rmtree(build_dir)
    os.makedirs(build_dir, exist_ok=True)
|
||||
|
||||
def build():
    """Run the full release pipeline.

    Steps: bump ``__build__`` in the core source, patch metadata.yml and
    regenerate the PyInstaller version file, run PyInstaller, hash the
    resulting dist, diff it against the previous baseline, emit an
    incremental patch directory if anything changed, refresh and zip the
    'latest' tree, clean up the build directory, and archive the log.

    Sets the module-global ``product_name`` from the metadata so the
    helper functions see the real application name.
    """
    global product_name
    new_ver = patch_core_build()
    old_ver, product_name = patch_metadata(*new_ver)
    logger.info("Building...")
    result = subprocess.run(get_pyinstaller_cmd())
    # Trust the exit code, not just the presence of the dist directory:
    # a failed rerun can still leave an old distpath behind.
    if result.returncode != 0 or not os.path.exists(distpath):
        logger.error("[ERR] Build unsuccessful")
        return
    logger.info("Build successful")
    logger.info("Preparing dist")
    logger.info(" - generating sha256")
    new_sha = prepare_dist()
    old_sha = read_sha256()

    logger.info(" - comparing..")
    diff = find_differences(old_sha, new_sha)
    logger.info(f" - {len(diff)} differences found")
    # Collapse the (major, minor, patch, build) tuple into a dotted string.
    new_ver = ".".join(str(part) for part in new_ver)
    if diff:
        update_dir = generate_patch(old_ver, new_ver, diff)
        logger.info(f" - diffs in: {os.path.basename(update_dir)}")

    logger.info(" - saving..")
    save_sha256(new_sha)
    update_latest()
    zip_latest(new_ver)
    logger.info(" - cleaning up")
    cleanup()
    logger.info("Ready")
    # Keep a per-version copy of the build log next to the rolling latest.log.
    shutil.copy(log_file, log_dir / f"build_{new_ver}.log")
|
52
dist_scripts/config.py
Normal file
52
dist_scripts/config.py
Normal file
@ -0,0 +1,52 @@
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from loguru import logger
|
||||
|
||||
|
||||
# File paths
core_path = './src/core/core.py'
metadata_path = './win/metadata.yml'
metadata_path_txt = './win/version_file.txt'
build_json_path = './win/build.json'

# Build settings
main = 'src/main.py'
icon = "./src/resources/ico/icon_dark.ico"
data = [
    "./src/resources;resources/",  # resources folder (UI assets etc.)
    "./.venv/Lib/site-packages/customtkinter;customtkinter/",  # customtkinter library folder
]

# Prefix that turns the repo-relative paths above into absolute ones
# (this file lives two levels below the repository root).
path_fix = os.path.abspath(os.path.dirname(__file__)) + "/../../"

contents_directory = "."
optimize = 2
disable_windowed_traceback = True
admin = False

build_dir = 'win/build'
os.makedirs(build_dir, exist_ok=True)
distpath = f'{build_dir}/dist/'
workpath = f'{build_dir}/build/'
specpath = f'{build_dir}'

win_dir = Path('win')

dist_dir = win_dir / 'output'
sha_file = dist_dir / 'latest.json'
latest_dir = dist_dir / 'latest'
patch_dir = 'win/output/Patch {old_ver}-{new_ver}'
patch_file = 'win/output/Patch {old_ver}-{new_ver}/patch.json'

log_dir = win_dir / 'logs'
log_file = log_dir / 'latest.log'
os.makedirs(log_dir, exist_ok=True)
# Start every run with a clean log file.
if log_file.exists():
    log_file.unlink()
logger.remove()
fmt = "<green>{elapsed}</green> {message}"
logger.add(sys.stdout, level="INFO", format=fmt)
logger.add(log_file, level="INFO", format=fmt)

os.makedirs(latest_dir, exist_ok=True)
|
2
dist_scripts/patchers/__init__.py
Normal file
2
dist_scripts/patchers/__init__.py
Normal file
@ -0,0 +1,2 @@
|
||||
from .core_build import patch_core_build
|
||||
from .metadata import patch_metadata
|
31
dist_scripts/patchers/core_build.py
Normal file
31
dist_scripts/patchers/core_build.py
Normal file
@ -0,0 +1,31 @@
|
||||
from ..config import *
|
||||
|
||||
|
||||
def patch_core_build():
    """Increment ``__build__`` in the core source file.

    Reads ``core_path``, extracts ``__version__`` (a ``'major.minor.patch'``
    string literal) and the integer ``__build__``, bumps the build number by
    one and writes the file back in place.

    Returns:
        (major, minor, patch, build_i) — the version parts as strings and
        the *new* build number as an int.

    Raises:
        ValueError: if either the ``__version__`` or ``__build__`` marker
        is missing (instead of silently mis-slicing on ``str.find`` == -1).
    """
    logger.info("Patching core")
    with open(core_path, 'r', encoding="utf-8") as file:
        core = file.read()
    logger.info(" - core loaded")
    logger.info(" - unpack values...")

    version_marker = core.find("__version__ = ")
    if version_marker == -1:
        raise ValueError(f"__version__ marker not found in {core_path}")
    ver_data_index = core.find("'", version_marker)
    major, minor, patch = core[ver_data_index + 1:core.find("'", ver_data_index + 1)].split('.')

    build_prefix = "__build__ = "
    build_index_start = core.find(build_prefix)
    if build_index_start == -1:
        raise ValueError(f"__build__ marker not found in {core_path}")
    build_index_stop = core.find("\n", build_index_start)
    # len(build_prefix) replaces the former magic offset 12.
    build_i = int(core[build_index_start + len(build_prefix):build_index_stop])

    logger.info(f" - current version: {major}.{minor}.{patch}.{build_i}")
    build_i += 1
    logger.info(f" - patched version: {major}.{minor}.{patch}.{build_i}")

    logger.info(" - patching")
    core = core[:build_index_start] + f"{build_prefix}{build_i}" + core[build_index_stop:]
    with open(core_path, 'w', encoding="utf-8") as file:
        file.write(core)
    logger.info(" - saved")
    logger.info("Ready")
    return major, minor, patch, build_i
|
||||
|
||||
|
||||
# Allow running this patcher standalone (python -m dist_scripts.patchers.core_build).
if __name__ == '__main__':
    patch_core_build()
|
26
dist_scripts/patchers/metadata.py
Normal file
26
dist_scripts/patchers/metadata.py
Normal file
@ -0,0 +1,26 @@
|
||||
import subprocess
|
||||
from ruamel.yaml import YAML
|
||||
|
||||
from loguru import logger
|
||||
from ..config import metadata_path, metadata_path_txt
|
||||
|
||||
# Round-trip parser/emitter; ruamel's default mode preserves comments and
# formatting when metadata.yml is rewritten.
yaml = YAML()
|
||||
|
||||
|
||||
def patch_metadata(major, minor, patch, build_i):
    """Write the new version into metadata.yml and regenerate the version file.

    Args are the version parts produced by ``patch_core_build()``.
    Updates the ``Version`` field in ``metadata_path``, then shells out to
    ``create-version-file`` to regenerate the PyInstaller version-info txt.

    Returns:
        (old_version_string, product_name) read from the metadata.
    """
    logger.info("Patching metadata")
    # Explicit UTF-8: on Windows the locale default encoding could mangle the YAML.
    with open(metadata_path, 'r', encoding='utf-8') as file:
        metadata = yaml.load(file)
    logger.info(" - metadata loaded")
    product_name = metadata['ProductName']
    old_data = metadata['Version']
    logger.info(f" - current version: {old_data}")
    logger.info(f" - patched version: {major}.{minor}.{patch}.{build_i}")
    metadata['Version'] = f'{major}.{minor}.{patch}.{build_i}'
    with open(metadata_path, 'w', encoding='utf-8') as file:
        yaml.dump(metadata, file)
    logger.info(" - saved")
    logger.info(" - creating version file")
    # Console script from the pyinstaller-versionfile package.
    subprocess.run(['create-version-file', metadata_path, '--outfile', metadata_path_txt])
    logger.info("Ready")
    return old_data, product_name
|
Loading…
x
Reference in New Issue
Block a user