initial Pyside rework
Ati1707 committed Feb 2, 2025
1 parent 410bb1b commit 5f4b66f
Showing 8 changed files with 389 additions and 322 deletions.
29 changes: 19 additions & 10 deletions content_database.py
@@ -7,28 +7,30 @@

lock = threading.Lock()


def connect_database(db_path: str = "database/archives.db") -> sqlite3.Connection:
"""Connect to the SQLite database and ensure necessary tables exist."""
conn = sqlite3.connect(db_path)
conn.execute("PRAGMA foreign_keys = ON")

with conn:
conn.execute('''
conn.execute("""
CREATE TABLE IF NOT EXISTS archives (
id INTEGER PRIMARY KEY,
archive_name TEXT NOT NULL
)
''')
conn.execute('''
""")
conn.execute("""
CREATE TABLE IF NOT EXISTS files (
id INTEGER PRIMARY KEY,
archive_id INTEGER,
file_name TEXT NOT NULL,
FOREIGN KEY (archive_id) REFERENCES archives (id) ON DELETE CASCADE
)
''')
""")
return conn


def add_archive(archive_name: str, files: list[str]) -> None:
"""
Add a new archive and its associated files to the database.
@@ -40,7 +42,9 @@ def add_archive(archive_name: str, files: list[str]) -> None:
with connect_database() as conn:
cursor = conn.cursor()
try:
cursor.execute("INSERT INTO archives (archive_name) VALUES (?)", (archive_name,))
cursor.execute(
"INSERT INTO archives (archive_name) VALUES (?)", (archive_name,)
)
archive_id = cursor.lastrowid
cursor.executemany(
"INSERT INTO files (archive_id, file_name) VALUES (?, ?)",
@@ -50,6 +54,7 @@ def add_archive(archive_name: str, files: list[str]) -> None:
except sqlite3.IntegrityError:
logging.error(f"Archive '{archive_name}' already exists. Skipping.")


def get_archives() -> list[tuple[str, str]]:
"""
Retrieve a list of all archives and their file counts.
@@ -83,15 +88,19 @@ def delete_archive(archive_name: str) -> None:
with lock: # Ensure thread safety
with connect_database() as conn:
cursor = conn.cursor()
cursor.execute("SELECT id FROM archives WHERE archive_name = ?", (archive_name,))
cursor.execute(
"SELECT id FROM archives WHERE archive_name = ?", (archive_name,)
)
result = cursor.fetchone()

if not result:
logging.info(f"Archive '{archive_name}' not found.")
return

archive_id = result[0]
cursor.execute("SELECT file_name FROM files WHERE archive_id = ?", (archive_id,))
cursor.execute(
"SELECT file_name FROM files WHERE archive_id = ?", (archive_id,)
)
files = cursor.fetchall()

# Delete associated files from the filesystem
@@ -125,14 +134,14 @@ def does_archive_exist(archive_name: str, file_list: list[str]) -> bool:
with connect_database() as conn:
cursor = conn.cursor()
cursor.execute(
'''
"""
SELECT COUNT(*) FROM archives a
JOIN files f ON a.id = f.archive_id
WHERE a.archive_name = ?
GROUP BY a.id
HAVING COUNT(f.id) = ?
''',
""",
(archive_name, file_count),
)
result = cursor.fetchone()
return result is not None
return result is not None
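The hunks above are mostly mechanical reformatting (double-quoted SQL strings, wrapped calls, extra blank lines), but they also outline the module's small public surface: connect_database(), add_archive(), and does_archive_exist(). Below is a minimal usage sketch, assuming the module is importable as content_database and that the database/ directory behind the default db_path already exists; the archive name and file paths are illustrative only.

import content_database

archive_name = "ExampleAsset"
file_list = [
    "Runtime/Textures/Example/diffuse.jpg",
    "People/Genesis 8 Female/Characters/Example.duf",
]

# Store the archive and its files; add_archive() logs and skips on
# sqlite3.IntegrityError rather than raising.
content_database.add_archive(archive_name, file_list)

# A later import of the same archive can be detected before copying files again.
if content_database.does_archive_exist(archive_name, file_list):
    print(f"'{archive_name}' is already tracked in the database.")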
7 changes: 5 additions & 2 deletions helper/config_operations.py
@@ -1,14 +1,17 @@
import configparser


def get_library_path():
config = _get_config_file()
return config["PATH"].get("LibraryPath").strip('\"')
return config["PATH"].get("LibraryPath").strip('"')


def get_debug_mode():
config = _get_config_file()
return config["DEBUG"].getboolean("DebugMode")


def _get_config_file():
config = configparser.ConfigParser()
config.read("config.ini")
return config
return config
8 changes: 5 additions & 3 deletions helper/file_operations.py
@@ -5,10 +5,10 @@
from pathlib import Path, PurePath



def get_file_from_path(file_path):
return PurePath(file_path).name


def get_file_name_without_extension(file):
return file.rpartition(".")[0]

@@ -52,6 +52,7 @@ def delete_temp_folder() -> None:
if temp_path.exists():
shutil.rmtree(temp_path)


def get_file_size(file_path):
file = Path(file_path)
if not file.exists():
@@ -110,10 +111,11 @@ def create_logger() -> logging.Logger:
filename=str(log_file),
level=logging.INFO,
format="%(asctime)s - %(levelname)s - %(message)s",
datefmt='%m/%d/%Y %I:%M:%S'
datefmt="%m/%d/%Y %I:%M:%S",
)
return logging.getLogger(__name__)


def is_file_archive(file):
if file.lower().endswith(('.zip', '.rar', '.7z', '.tar')):
if file.lower().endswith((".zip", ".rar", ".7z", ".tar")):
return True
6 changes: 5 additions & 1 deletion helper/updater.py
@@ -6,6 +6,7 @@

api_url = "https://api.github.com/repos/Ati1707/DazContentInstaller/releases/latest"


def is_new_update_available(local_version):
response = urllib.request.urlopen(api_url).read()
data = json.loads(response)
@@ -16,5 +17,8 @@ def is_new_update_available(local_version):
return True
return False


def open_release_page():
webbrowser.open("https://github.com/Ati1707/DazContentInstaller/releases", new=0, autoraise=True)
webbrowser.open(
"https://github.com/Ati1707/DazContentInstaller/releases", new=0, autoraise=True
)
82 changes: 57 additions & 25 deletions installer.py
@@ -1,4 +1,3 @@
import logging
import pathlib
import patoolib
import re
@@ -20,9 +19,21 @@

# Folders to target during extraction
TARGET_FOLDERS = [
"aniBlocks", "data", "Environments", "Light Presets", "People", "Props",
"ReadMe's", "Render Presets", "Render Settings", "Runtime", "Scenes",
"Scripts", "Shader Presets", "Cameras", "Documentation"
"aniBlocks",
"data",
"Environments",
"Light Presets",
"People",
"Props",
"ReadMe's",
"Render Presets",
"Render Settings",
"Runtime",
"Scenes",
"Scripts",
"Shader Presets",
"Cameras",
"Documentation",
]

# Determine base path based on execution context
@@ -46,9 +57,9 @@ def get_relative_path(full_path: str) -> str:
If the path contains a target folder, return the sub-path starting
from the target folder.
"""
pattern = r'|'.join([re.escape(folder) for folder in TARGET_FOLDERS])
pattern = r"|".join([re.escape(folder) for folder in TARGET_FOLDERS])
match = re.search(pattern, full_path)
return full_path[match.start():] if match else full_path
return full_path[match.start() :] if match else full_path


def clean_temp_folder() -> None:
@@ -65,7 +76,7 @@ def extract_archive(item_path: pathlib.Path, is_debug_mode: bool) -> bool:
Extract an archive into the temporary folder.
"""
base_item_name = item_path.name
if base_item_name.lower().endswith(('.zip', '.rar', '.7z', '.tar')):
if base_item_name.lower().endswith((".zip", ".rar", ".7z", ".tar")):
logger.info(f"Extracting {base_item_name}")
try:
verbosity = 2 if is_debug_mode else -1
Expand All @@ -74,7 +85,7 @@ def extract_archive(item_path: pathlib.Path, is_debug_mode: bool) -> bool:
outdir=str(TEMP_FOLDER),
verbosity=verbosity,
interactive=False,
program=str(SEVEN_ZIP_PATH)
program=str(SEVEN_ZIP_PATH),
)
time.sleep(1)
return True
@@ -98,15 +109,21 @@ def add_to_database(root_path: pathlib.Path, item: pathlib.Path) -> bool:
Add the extracted files to the content database.
"""
archive_name = item.stem.split(".")[0]
file_list = [get_relative_path(str(file_path)) for file_path in root_path.rglob("*") if file_path.is_file()]
file_list = [
get_relative_path(str(file_path))
for file_path in root_path.rglob("*")
if file_path.is_file()
]

if content_database.does_archive_exist(archive_name, file_list):
logger.info(f"Archive '{archive_name}' already exists in the database.")
global archive_exists
archive_exists = True
return True
else:
logger.info(f"Adding archive '{archive_name}' with {len(file_list)} files to the database.")
logger.info(
f"Adding archive '{archive_name}' with {len(file_list)} files to the database."
)
content_database.add_archive(archive_name, file_list)
time.sleep(1)
return False
@@ -119,7 +136,7 @@ def handle_nested_archives(root_path, files, is_debug_mode):
archive_extracted = False
for file in files:
file_path = root_path / file
if file.lower().endswith(('.zip', '.rar', '.7z', '.tar')):
if file.lower().endswith((".zip", ".rar", ".7z", ".tar")):
logger.info(f"Extracting nested archive: {file}")
try:
verbosity = 2 if is_debug_mode else -1
Expand All @@ -138,7 +155,9 @@ def handle_nested_archives(root_path, files, is_debug_mode):
return archive_extracted


def process_manifest_and_target_folders(root_path, dirs, files, progressbar, current_item):
def process_manifest_and_target_folders(
root_path, dirs, files, progressbar, current_item
):
"""
Check for manifest files and target folders, and process them accordingly.
"""
@@ -149,44 +168,55 @@ def process_manifest_and_target_folders(root_path, dirs, files, progressbar, current_item):
if manifest_exists and folder.lower().startswith("content"):
content_path = root_path / folder
clean_folder(content_path)
progressbar.set(progressbar.get() + 0.1)
progressbar.setValue(progressbar.value() + 0.1)
if add_to_database(content_path, current_item):
progressbar.set(progressbar.get() + 0.1)
progressbar.setValue(progressbar.value() + 0.1)
return False
shutil.copytree(content_path, get_library_path(), dirs_exist_ok=True)
return True

if any(target.lower() == folder.lower() for target in TARGET_FOLDERS):
clean_folder(root_path)
progressbar.set(progressbar.get() + 0.1)
progressbar.setValue(progressbar.value() + 0.1)
if add_to_database(root_path, current_item):
return False
shutil.copytree(root_path, get_library_path(), dirs_exist_ok=True)
return True
return False


def traverse_directory(folder_path: pathlib.Path, current_item: pathlib.Path, progressbar, is_debug_mode: bool):
def traverse_directory(
folder_path: pathlib.Path,
current_item: pathlib.Path,
progressbar,
is_debug_mode: bool,
):
"""
Traverse the directory structure and handle nested archives and target folders.
"""
for root, dirs, files in folder_path.walk():
root_path = pathlib.Path(root)

if handle_nested_archives(root_path, files, is_debug_mode):
progressbar.set(progressbar.get() + 0.1)
return traverse_directory(folder_path, current_item, progressbar, is_debug_mode)
if process_manifest_and_target_folders(root_path, dirs, files, progressbar, current_item):
progressbar.set(progressbar.get() + 0.1)
progressbar.setValue(progressbar.value() + 0.1)
return traverse_directory(
folder_path, current_item, progressbar, is_debug_mode
)
if process_manifest_and_target_folders(
root_path, dirs, files, progressbar, current_item
):
progressbar.setValue(progressbar.value() + 0.1)
return True
if archive_exists:
return False
progressbar.set(progressbar.get() + 0.1)
progressbar.setValue(progressbar.value() + 0.1)

return False


def start_installer_gui(file_path: str, progressbar, is_delete_archive: bool = False) -> bool:
def start_installer_gui(
file_path: str, progressbar, is_delete_archive: bool = False
) -> bool:
"""
Main function to handle the installation process via the GUI.
@@ -204,20 +234,22 @@ def start_installer_gui(file_path: str, progressbar, is_delete_archive: bool = False) -> bool:
logger.info(f"Installing {file_path}")
create_temp_folder()
clean_temp_folder()
progressbar.set(0.1)
progressbar.setValue(0.1)

if not extract_archive(file_path, get_debug_mode()):
clean_temp_folder()
return is_archive_imported

progressbar.set(0.4)
progressbar.setValue(0.4)

if traverse_directory(TEMP_FOLDER, file_path, progressbar, get_debug_mode()):
is_archive_imported = True
logger.info(f"Successfully imported: {file_path}")
else:
is_archive_imported = False
logger.warning(f"Failed to import {file_path}. Invalid folder structure or asset already exists.")
logger.warning(
f"Failed to import {file_path}. Invalid folder structure or asset already exists."
)

clean_temp_folder()
delete_temp_folder()
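Beyond the reflowed formatting, the installer.py hunks carry the actual PySide rework: each progressbar.set(...) / progressbar.get() pair becomes progressbar.setValue(...) / progressbar.value(), matching Qt's progress-bar interface. The standalone sketch below shows that interface in isolation, assuming PySide6; the window, button, and step size are illustrative and not taken from this repository.

# Hedged sketch, not from this repository: a minimal PySide6 QProgressBar
# driven through value()/setValue(), the calls the diff migrates to.
import sys

from PySide6.QtWidgets import (
    QApplication,
    QProgressBar,
    QPushButton,
    QVBoxLayout,
    QWidget,
)

app = QApplication(sys.argv)

window = QWidget()
layout = QVBoxLayout(window)

# QProgressBar works on an integer range; 0-100 is the Qt default.
progressbar = QProgressBar()
progressbar.setRange(0, 100)
progressbar.setValue(0)


def advance() -> None:
    # Read the current value and push it forward, clamped to the maximum,
    # mirroring the progressbar.setValue(progressbar.value() + ...) pattern.
    progressbar.setValue(min(progressbar.value() + 10, progressbar.maximum()))


button = QPushButton("Advance")
button.clicked.connect(advance)

layout.addWidget(progressbar)
layout.addWidget(button)
window.show()

sys.exit(app.exec())

Note that QProgressBar stores an integer value, so fractional steps such as 0.1 only make sense relative to whatever range the GUI module, which is not shown in this diff, configures for the bar.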
