Remove hardcoded libpython binaries and add debug step
All checks were successful
build / build-linux (push) Successful in 16s
All checks were successful
build / build-linux (push) Successful in 16s
This commit is contained in:
1343
venv/lib/python3.12/site-packages/PyInstaller/utils/hooks/__init__.py
Executable file
1343
venv/lib/python3.12/site-packages/PyInstaller/utils/hooks/__init__.py
Executable file
File diff suppressed because it is too large
Load Diff
401
venv/lib/python3.12/site-packages/PyInstaller/utils/hooks/conda.py
Executable file
401
venv/lib/python3.12/site-packages/PyInstaller/utils/hooks/conda.py
Executable file
@@ -0,0 +1,401 @@
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (c) 2005-2023, PyInstaller Development Team.
|
||||
#
|
||||
# Distributed under the terms of the GNU General Public License (version 2
|
||||
# or later) with exception for distributing the bootloader.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#
|
||||
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
# language=rst
|
||||
"""
|
||||
Additional helper methods for working specifically with Anaconda distributions are found at
|
||||
:mod:`PyInstaller.utils.hooks.conda_support<PyInstaller.utils.hooks.conda>`
|
||||
which is designed to mimic (albeit loosely) the `importlib.metadata`_ package. These functions find and parse the
|
||||
distribution metadata from json files located in the ``conda-meta`` directory.
|
||||
|
||||
.. versionadded:: 4.2.0
|
||||
|
||||
This module is available only if run inside a Conda environment. Usage of this module should therefore be wrapped in
|
||||
a conditional clause::
|
||||
|
||||
from PyInstaller.compat import is_pure_conda
|
||||
|
||||
if is_pure_conda:
|
||||
from PyInstaller.utils.hooks import conda_support
|
||||
|
||||
# Code goes here. e.g.
|
||||
binaries = conda_support.collect_dynamic_libs("numpy")
|
||||
...
|
||||
|
||||
Packages are all referenced by the *distribution name* you use to install it, rather than the *package name* you import
|
||||
it with. I.e., use ``distribution("pillow")`` instead of ``distribution("PIL")`` or use ``package_distribution("PIL")``.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import fnmatch
|
||||
import json
|
||||
import pathlib
|
||||
import sys
|
||||
from typing import Iterable, List
|
||||
from importlib.metadata import PackagePath as _PackagePath
|
||||
|
||||
from PyInstaller import compat
|
||||
from PyInstaller.log import logger
|
||||
|
||||
# Conda virtual environments each get their own copy of `conda-meta`, so using `sys.prefix` (rather than
# `sys.base_prefix`, `sys.real_prefix` or anything from our `compat` module) is intentional.
CONDA_ROOT = pathlib.Path(sys.prefix)
CONDA_META_DIR = CONDA_ROOT / "conda-meta"

# Collect every entry of `sys.path` that lives inside the Conda root, stored relative to that root.
PYTHONPATH_PREFIXES = []
for _path in sys.path:
    try:
        PYTHONPATH_PREFIXES.append(pathlib.Path(_path).relative_to(sys.prefix))
    except ValueError:
        # Entry lies outside the Conda root - ignore it.
        pass

# Most specific (longest) prefixes first.
PYTHONPATH_PREFIXES.sort(key=lambda prefix: len(prefix.parts), reverse=True)
|
||||
|
||||
|
||||
class Distribution:
    """
    A bucket class representation of a Conda distribution.

    This bucket exports the following attributes:

    :ivar name: The distribution's name.
    :ivar version: Its version.
    :ivar files: All filenames as :meth:`PackagePath`\\ s included with this distribution.
    :ivar dependencies: Names of other distributions that this distribution depends on (with version constraints
                        removed).
    :ivar packages: Names of importable packages included in this distribution.

    This class is not intended to be constructed directly by users. Rather use :meth:`distribution` or
    :meth:`package_distribution` to provide one for you.
    """
    def __init__(self, json_path: str):
        try:
            self._json_path = pathlib.Path(json_path)
            assert self._json_path.exists()
        except (TypeError, AssertionError):
            raise TypeError(
                "Distribution requires a path to a conda-meta json. Perhaps you want "
                "`distribution({})` instead?".format(repr(json_path))
            )

        # The metadata json carries everything we need, including this distribution's own name.
        self.raw: dict = json.loads(self._json_path.read_text())

        # Unpack the more useful parts of the json into attributes.
        self.name: str = self.raw["name"]
        self.version: str = self.raw["version"]
        self.files = [PackagePath(i) for i in self.raw["files"]]
        self.dependencies = self._init_dependencies()
        self.packages = self._init_package_names()

    def __repr__(self):
        return f"{type(self).__name__}(name=\"{self.name}\", packages={self.packages})"

    def _init_dependencies(self):
        """
        Read dependencies from ``self.raw["depends"]``.

        :return: Dependent distribution names.
        :rtype: list

        Each entry of ``self.raw["depends"]`` has the form ``"name [version constraints]"``; only the leading name
        is kept.
        """
        return [specifier.split(maxsplit=1)[0] for specifier in self.raw["depends"]]

    def _init_package_names(self):
        """
        Search ``self.files`` for package names shipped by this distribution.

        :return: Package names.
        :rtype: list

        These are names you would ``import`` rather than names you would install.
        """
        return [name for name in map(_get_package_name, self.files) if name is not None]

    @classmethod
    def from_name(cls, name: str):
        """
        Get distribution information for a given distribution **name** (i.e., something you would ``conda install``).

        :rtype: :class:`Distribution`
        """
        dist = distributions.get(name)
        if dist is not None:
            return dist
        raise ModuleNotFoundError(
            "Distribution {} is either not installed or was not installed using Conda.".format(name)
        )

    @classmethod
    def from_package_name(cls, name: str):
        """
        Get distribution information for a **package** (i.e., something you would import).

        :rtype: :class:`Distribution`

        For example, the package ``pkg_resources`` belongs to the distribution ``setuptools``, which contains three
        packages.

        >>> package_distribution("pkg_resources")
        Distribution(name="setuptools",
                     packages=['easy_install', 'pkg_resources', 'setuptools'])
        """
        dist = distributions_by_package.get(name)
        if dist is not None:
            return dist
        raise ModuleNotFoundError("Package {} is either not installed or was not installed using Conda.".format(name))
|
||||
|
||||
|
||||
# Module-level convenience aliases for the class-method constructors, mirroring the function-based API of
# `importlib.metadata` (`distribution(name)` / `package_distribution(package)`).
distribution = Distribution.from_name
package_distribution = Distribution.from_package_name
|
||||
|
||||
|
||||
class PackagePath(_PackagePath):
    """
    A filename relative to the Conda root (``sys.prefix``).

    Instances behave like :class:`pathlib.PurePosixPath`, regardless of the host OS. Call :meth:`locate` to obtain a
    concrete :class:`pathlib.Path` pointing at the real file.
    """
    def locate(self):
        """
        Resolve this relative name against the Conda root and return a path-like object for the file's true location.
        """
        return pathlib.Path(sys.prefix, self)
|
||||
|
||||
|
||||
def walk_dependency_tree(initial: str, excludes: Iterable[str] | None = None):
    """
    Collect a :class:`Distribution` and all direct and indirect dependencies of that distribution.

    Arguments:
        initial:
            Distribution name to collect from.
        excludes:
            Distributions to exclude.
    Returns:
        A ``{name: distribution}`` mapping where ``distribution`` is the output of
        :func:`conda_support.distribution(name) <distribution>`.
    """
    if excludes is not None:
        excludes = set(excludes)

    # Depth-first traversal via an explicit stack rather than true recursion.
    done = {}
    stack = [initial]

    while stack:
        # Grab a distribution name from the to-do stack.
        name = stack.pop()
        try:
            # Collect and save its metadata.
            done[name] = dist = Distribution.from_name(name)
            logger.debug("Collected Conda distribution '%s', a dependency of '%s'.", name, initial)
        except ModuleNotFoundError:
            logger.warning(
                "Conda distribution '%s', dependency of '%s', was not found. "
                "If you installed this distribution with pip then you may ignore this warning.", name, initial
            )
            continue
        for dep_name in dist.dependencies:
            # Skip names already collected, self-dependencies (guards against a distribution that - however
            # unlikely - depends on itself), and explicitly excluded names.
            if dep_name in done or dep_name == name:
                continue
            if excludes is not None and dep_name in excludes:
                continue
            stack.append(dep_name)
    return done
|
||||
|
||||
|
||||
def _iter_distributions(name, dependencies, excludes):
    # With dependency traversal enabled, yield every distribution in the tree; otherwise just the named one.
    if not dependencies:
        return [Distribution.from_name(name)]
    return walk_dependency_tree(name, excludes).values()
|
||||
|
||||
|
||||
def requires(name: str, strip_versions: bool = False) -> List[str]:
    """
    List requirements of a distribution.

    Arguments:
        name:
            The name of the distribution.
        strip_versions:
            List only their names, not their version constraints.
    Returns:
        A list of distribution names.
    """
    dist = distribution(name)
    return dist.dependencies if strip_versions else dist.raw["depends"]
|
||||
|
||||
|
||||
def files(name: str, dependencies: bool = False, excludes: list | None = None) -> List[PackagePath]:
    """
    List all files belonging to a distribution.

    Arguments:
        name:
            The name of the distribution.
        dependencies:
            Recursively collect files of dependencies too.
        excludes:
            Distributions to ignore if **dependencies** is true.
    Returns:
        All filenames belonging to the given distribution.

    With ``dependencies=False``, this is just a shortcut for::

        conda_support.distribution(name).files
    """
    collected = []
    for dist in _iter_distributions(name, dependencies, excludes):
        collected.extend(dist.files)
    return collected
|
||||
|
||||
|
||||
# Conda's shared DLL folder: ``Library/bin`` on Windows, ``lib`` everywhere else.
lib_dir = pathlib.PurePath("Library", "bin") if compat.is_win else pathlib.PurePath("lib")
|
||||
|
||||
|
||||
def collect_dynamic_libs(name: str, dest: str = ".", dependencies: bool = True, excludes: Iterable[str] | None = None):
    """
    Collect DLLs for distribution **name**.

    Arguments:
        name:
            The distribution's project-name.
        dest:
            Target destination, defaults to ``'.'``.
        dependencies:
            Recursively collect libs for dependent distributions (recommended).
        excludes:
            Dependent distributions to skip, defaults to ``None``.
    Returns:
        List of DLLs in PyInstaller's ``(source, dest)`` format.

    This collects libraries only from Conda's shared ``lib`` (Unix) or ``Library/bin`` (Windows) folders. To collect
    from inside a distribution's installation use the regular :func:`PyInstaller.utils.hooks.collect_dynamic_libs`.
    """
    DLL_SUFFIXES = ("*.dll", "*.dylib", "*.so", "*.so.*")
    _files = []
    for file in files(name, dependencies, excludes):
        # A file is classified as a dynamic library if:
        # 1) it lives inside the dedicated ``lib_dir`` DLL folder.
        #
        # NOTE: `file` is an instance of `PackagePath`, which inherits from `pathlib.PurePosixPath` even on Windows.
        # Therefore, it does not properly handle cases when metadata paths contain Windows-style separator, which does
        # seem to be used on some Windows installations (see #9113). Therefore, cast `file` to `pathlib.PurePath`
        # before comparing its parent to `lib_dir` (which should also be a `pathlib.PurePath`).
        if pathlib.PurePath(file).parent != lib_dir:
            continue
        # 2) it is a file (and not a directory or a symbolic link pointing to a directory)
        resolved_file = file.locate()
        if not resolved_file.is_file():
            continue
        # 3) it has a correct suffix. Use a generator with `any()` instead of materializing an intermediate list.
        if not any(resolved_file.match(suffix) for suffix in DLL_SUFFIXES):
            continue

        _files.append((str(resolved_file), dest))
    return _files
|
||||
|
||||
|
||||
# --- Map packages to distributions and vice-versa ---
|
||||
|
||||
|
||||
def _get_package_name(file: PackagePath):
    """
    Determine the package name of a Python file in :data:`sys.path`.

    Arguments:
        file:
            A Python filename relative to Conda root (sys.prefix).
    Returns:
        Package name or None.

    Only single-file modules (e.g. ``foo.py``) and top-level packages (``foo/__init__.py``) are considered; anything
    else yields ``None``.
    """
    # TODO: Handle PEP 420 namespace packages (which are missing `__init__` module). No such Conda PEP 420 namespace
    # packages are known.
    path = pathlib.Path(file)

    if path.stem == "__init__" and path.suffix in compat.ALL_SUFFIXES:
        # A top-level package: represented by the folder holding its `__init__` module.
        path = path.parent
    elif path.suffix not in compat.ALL_SUFFIXES:
        # Keep single-file packages but skip DLLs, data and junk files.
        return None

    # The file/folder counts only if its parent is directly on ``sys.path`` i.e. it is directly importable. This
    # intentionally excludes submodules, which would cause confusion because ``sys.prefix`` is in ``sys.path``,
    # meaning that every file in a Conda installation is a submodule.
    for prefix in PYTHONPATH_PREFIXES:
        # Cheap length pre-check: `path` must sit exactly one level below `prefix`.
        if len(path.parts) != len(prefix.parts) + 1:
            continue
        # No wildcards involved - ``fnmatch`` is used purely to get case-insensitive matching on case-insensitive
        # file systems.
        if fnmatch.fnmatch(str(path.parent), str(prefix)):
            return path.stem
    return None
|
||||
|
||||
|
||||
# All the information we want is organised the wrong way.
|
||||
|
||||
# We want to look up distribution based on package names, but we can only search for packages using distribution names.
|
||||
# And we would like to search for a distribution's json file, but, due to the noisy filenames of the jsons, we can only
|
||||
# find a json's distribution rather than a distribution's json.
|
||||
|
||||
# So we have to read everything, then regroup distributions in the ways we want them grouped. This will likely be a
|
||||
# spectacular bottleneck on full-blown Conda (non miniconda) with 250+ packages by default at several GiBs. I suppose we
|
||||
# could cache this on a per-json basis if it gets too much.
|
||||
|
||||
|
||||
def _init_distributions():
    # Parse every metadata json in `conda-meta`, keyed by distribution name. A later duplicate name would simply
    # overwrite an earlier one, matching plain dict-assignment semantics.
    return {dist.name: dist for dist in map(Distribution, CONDA_META_DIR.glob("*.json"))}


distributions = _init_distributions()
|
||||
|
||||
|
||||
def _init_packages():
    # Invert the distribution -> packages relationship into a package -> distribution lookup table.
    return {
        package: dist
        for dist in distributions.values()
        for package in dist.packages
    }


distributions_by_package = _init_packages()
|
||||
152
venv/lib/python3.12/site-packages/PyInstaller/utils/hooks/django.py
Executable file
152
venv/lib/python3.12/site-packages/PyInstaller/utils/hooks/django.py
Executable file
@@ -0,0 +1,152 @@
|
||||
# ----------------------------------------------------------------------------
|
||||
# Copyright (c) 2005-2023, PyInstaller Development Team.
|
||||
#
|
||||
# Distributed under the terms of the GNU General Public License (version 2
|
||||
# or later) with exception for distributing the bootloader.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#
|
||||
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
|
||||
# ----------------------------------------------------------------------------
|
||||
import os
|
||||
|
||||
from PyInstaller import isolated
|
||||
|
||||
|
||||
@isolated.decorate
def django_dottedstring_imports(django_root_dir):
    """
    An isolated helper that returns list of all Django dependencies, parsed from the `mysite.settings` module.

    NOTE: With newer version of Django this is most likely the part of PyInstaller that will be broken.

    Tested with Django 2.2
    """

    import sys
    import os

    import PyInstaller.utils.misc
    from PyInstaller.utils import hooks as hookutils

    # Extra search paths to add to sys.path:
    #  - parent directory of the django_root_dir
    #  - django_root_dir itself; often, Django users do not specify absolute imports in the settings module.
    search_paths = [
        PyInstaller.utils.misc.get_path_to_toplevel_modules(django_root_dir),
        django_root_dir,
    ]
    sys.path += search_paths

    # Set the path to project's settings module
    default_settings_module = os.path.basename(django_root_dir) + '.settings'
    settings_module = os.environ.get('DJANGO_SETTINGS_MODULE', default_settings_module)
    os.environ['DJANGO_SETTINGS_MODULE'] = settings_module

    # Calling django.setup() avoids the exception AppRegistryNotReady() and also reads the user settings
    # from DJANGO_SETTINGS_MODULE.
    # https://stackoverflow.com/questions/24793351/django-appregistrynotready
    import django  # noqa: E402

    django.setup()

    # This allows to access all django settings even from the settings.py module.
    from django.conf import settings  # noqa: E402

    hiddenimports = list(settings.INSTALLED_APPS)

    # Do not fail script when settings does not have such attributes.
    if hasattr(settings, 'TEMPLATE_CONTEXT_PROCESSORS'):
        hiddenimports += list(settings.TEMPLATE_CONTEXT_PROCESSORS)

    if hasattr(settings, 'TEMPLATE_LOADERS'):
        hiddenimports += list(settings.TEMPLATE_LOADERS)

    hiddenimports += [settings.ROOT_URLCONF]

    def _remove_class(class_name):
        # Strip the trailing class name from a dotted path, keeping only the module portion.
        return '.'.join(class_name.split('.')[0:-1])

    #-- Changes in Django 1.7.

    # Remove class names and keep just modules.
    if hasattr(settings, 'AUTHENTICATION_BACKENDS'):
        for cl in settings.AUTHENTICATION_BACKENDS:
            cl = _remove_class(cl)
            hiddenimports.append(cl)
    # Deprecated since 4.2, may be None until it is removed
    cl = getattr(settings, 'DEFAULT_FILE_STORAGE', None)
    if cl:
        hiddenimports.append(_remove_class(cl))
    if hasattr(settings, 'FILE_UPLOAD_HANDLERS'):
        for cl in settings.FILE_UPLOAD_HANDLERS:
            cl = _remove_class(cl)
            hiddenimports.append(cl)
    if hasattr(settings, 'MIDDLEWARE_CLASSES'):
        for cl in settings.MIDDLEWARE_CLASSES:
            cl = _remove_class(cl)
            hiddenimports.append(cl)
    # Templates is a dict:
    if hasattr(settings, 'TEMPLATES'):
        for templ in settings.TEMPLATES:
            backend = _remove_class(templ['BACKEND'])
            hiddenimports.append(backend)
            # Include context_processors.
            # NOTE(review): `templ` and `templ['OPTIONS']` appear to be dicts (see the `templ['BACKEND']` indexing
            # above), so these `hasattr` checks always evaluate to False and this branch never runs; the intent was
            # presumably `'OPTIONS' in templ`. Left as-is to preserve behavior - confirm before changing.
            if hasattr(templ, 'OPTIONS'):
                if hasattr(templ['OPTIONS'], 'context_processors'):
                    # Context processors are functions - strip last word.
                    mods = templ['OPTIONS']['context_processors']
                    mods = [_remove_class(x) for x in mods]
                    hiddenimports += mods
    # Include database backends - it is a dict.
    for v in settings.DATABASES.values():
        hiddenimports.append(v['ENGINE'])

    # Add templatetags and context processors for each installed app.
    for app in settings.INSTALLED_APPS:
        app_templatetag_module = app + '.templatetags'
        app_ctx_proc_module = app + '.context_processors'
        hiddenimports.append(app_templatetag_module)
        hiddenimports += hookutils.collect_submodules(app_templatetag_module)
        hiddenimports.append(app_ctx_proc_module)

    # Deduplicate imports.
    hiddenimports = list(set(hiddenimports))

    # Return the hidden imports
    return hiddenimports
|
||||
|
||||
|
||||
def django_find_root_dir():
    """
    Return path to directory (top-level Python package) that contains main django files. Return None if no directory
    was detected.

    Main Django project directory contain files like '__init__.py', 'settings.py' and 'url.py'.

    In Django 1.4+ the script 'manage.py' is not in the directory with 'settings.py' but usually one level up. We
    need to detect this special case too.
    """
    # 'PyInstaller.config' cannot be imported as other top-level modules.
    from PyInstaller.config import CONF

    # manage.py is supplied to PyInstaller as the first main executable script; start from its directory.
    manage_py = CONF['main_script']
    manage_dir = os.path.dirname(os.path.abspath(manage_py))

    def _has_settings_and_urls(entries):
        # A Django project directory ships settings(.py) alongside urls.py.
        return ('settings.py' in entries or 'settings' in entries) and 'urls.py' in entries

    # The Django root (the directory holding settings.py and urls.py) is either the manage.py directory itself...
    entries = set(os.listdir(manage_dir))
    if _has_settings_and_urls(entries):
        return manage_dir

    # ...or the first of its immediate subdirectories that contains the critical files.
    for entry in entries:
        candidate = os.path.join(manage_dir, entry)
        if os.path.isdir(candidate) and _has_settings_and_urls(os.listdir(candidate)):
            return candidate

    return None
|
||||
457
venv/lib/python3.12/site-packages/PyInstaller/utils/hooks/gi.py
Executable file
457
venv/lib/python3.12/site-packages/PyInstaller/utils/hooks/gi.py
Executable file
@@ -0,0 +1,457 @@
|
||||
# ----------------------------------------------------------------------------
|
||||
# Copyright (c) 2005-2023, PyInstaller Development Team.
|
||||
#
|
||||
# Distributed under the terms of the GNU General Public License (version 2
|
||||
# or later) with exception for distributing the bootloader.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#
|
||||
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
|
||||
# ----------------------------------------------------------------------------
|
||||
import os
|
||||
import pathlib
|
||||
import shutil
|
||||
import subprocess
|
||||
import hashlib
|
||||
import re
|
||||
|
||||
from PyInstaller.depend.utils import _resolveCtypesImports
|
||||
from PyInstaller.utils.hooks import collect_submodules, collect_system_data_files, get_hook_config
|
||||
from PyInstaller import isolated
|
||||
from PyInstaller import log as logging
|
||||
from PyInstaller import compat
|
||||
from PyInstaller.depend.bindepend import findSystemLibrary
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GiModuleInfo:
    """
    Information about a GObject-introspection (GI) module: the shared libraries associated with its typelib, the
    path to the .typelib file, and the module's typelib dependencies. The query imports `gi` and therefore runs in
    an isolated subprocess (via PyInstaller's `isolated` framework).
    """
    def __init__(self, module, version, hook_api=None):
        # Module name and version, as passed to `gi.require_version()`.
        self.name = module
        self.version = version
        # `available` flips to True only if the isolated query below succeeds.
        self.available = False
        self.sharedlibs = []
        self.typelib = None
        self.dependencies = []

        # If hook API is available, use it to override the version from hookconfig.
        if hook_api is not None:
            module_versions = get_hook_config(hook_api, 'gi', 'module-versions')
            if module_versions:
                self.version = module_versions.get(module, version)

        logger.debug("Gathering GI module info for %s %s", module, self.version)

        @isolated.decorate
        def _get_module_info(module, version):
            """
            Isolated helper: load the GI module and return a dict with its shared libraries, typelib path, and
            dependencies, or None if the module is unavailable.
            """
            import gi

            # Ideally, we would use gi.Repository, which provides common abstraction for some of the functions we use in
            # this codepath (e.g., `require`, `get_typelib_path`, `get_immediate_dependencies`). However, it lacks the
            # `get_shared_library` function, which is why we are using "full" bindings via `gi.repository.GIRepository`.
            #
            # PyGObject 3.52.0 switched from girepository-1.0 to girepository-2.0, which means that GIRepository version
            # has changed from 2.0 to 3.0 and some of the API has changed.
            try:
                gi.require_version("GIRepository", "3.0")
                new_api = True
            except ValueError:
                gi.require_version("GIRepository", "2.0")
                new_api = False

            from gi.repository import GIRepository

            # The old API had `get_default` method to obtain global singleton object; it was removed in the new API,
            # which requires creation of separate GIRepository instances.
            if new_api:
                repo = GIRepository.Repository()
                try:
                    repo.require(module, version, GIRepository.RepositoryLoadFlags.LAZY)
                except ValueError:
                    return None  # Module not available

                # The new API returns the list of shared libraries.
                sharedlibs = repo.get_shared_libraries(module)
            else:
                repo = GIRepository.Repository.get_default()
                try:
                    repo.require(module, version, GIRepository.RepositoryLoadFlags.IREPOSITORY_LOAD_FLAG_LAZY)
                except ValueError:
                    return None  # Module not available

                # Shared library/libraries
                # Comma-separated list of paths to shared libraries, or None if none are associated. Convert to list.
                sharedlibs = repo.get_shared_library(module)
                sharedlibs = [lib.strip() for lib in sharedlibs.split(",")] if sharedlibs else []

            # Path to .typelib file
            typelib = repo.get_typelib_path(module)

            # Dependencies
            # GIRepository.Repository.get_immediate_dependencies is available from gobject-introspection v1.44 on
            if hasattr(repo, 'get_immediate_dependencies'):
                dependencies = repo.get_immediate_dependencies(module)
            else:
                dependencies = repo.get_dependencies(module)

            return {
                'sharedlibs': sharedlibs,
                'typelib': typelib,
                'dependencies': dependencies,
            }

        # Try to query information; if this fails, mark module as unavailable.
        try:
            info = _get_module_info(module, self.version)
            if info is None:
                logger.debug("GI module info %s %s not found.", module, self.version)
            else:
                logger.debug("GI module info %s %s found.", module, self.version)
                self.sharedlibs = info['sharedlibs']
                self.typelib = info['typelib']
                self.dependencies = info['dependencies']
                self.available = True
        except Exception as e:
            logger.warning("Failed to query GI module %s %s: %s", module, self.version, e)

    def get_libdir(self):
        """
        Return the path to shared library used by the module. If no libraries are associated with the typelib, None is
        returned. If multiple library names are associated with the typelib, the path to the first resolved shared
        library is returned. Raises exception if module is unavailable or none of the shared libraries could be
        resolved.
        """
        # Module unavailable
        if not self.available:
            raise ValueError(f"Module {self.name} {self.version} is unavailable!")
        # Module has no associated shared libraries
        if not self.sharedlibs:
            return None
        for lib in self.sharedlibs:
            path = findSystemLibrary(lib)
            if path:
                return os.path.normpath(os.path.dirname(path))
        raise ValueError(f"Could not resolve any shared library of {self.name} {self.version}: {self.sharedlibs}!")

    def collect_typelib_data(self):
        """
        Return a tuple of (binaries, datas, hiddenimports) to be used by PyGObject related hooks.
        """
        datas = []
        binaries = []
        hiddenimports = []

        logger.debug("Collecting module data for %s %s", self.name, self.version)

        # Module unavailable
        if not self.available:
            raise ValueError(f"Module {self.name} {self.version} is unavailable!")

        # Find shared libraries
        resolved_libs = _resolveCtypesImports(self.sharedlibs)
        for resolved_lib in resolved_libs:
            logger.debug("Collecting shared library %s at %s", resolved_lib[0], resolved_lib[1])
            binaries.append((resolved_lib[1], "."))

        # Find and collect .typelib file. Run it through the `gir_library_path_fix` to fix the library path, if
        # necessary.
        typelib_entry = gir_library_path_fix(self.typelib)
        if typelib_entry:
            logger.debug('Collecting gir typelib at %s', typelib_entry[0])
            datas.append(typelib_entry)

        # Overrides for the module
        hiddenimports += collect_submodules('gi.overrides', lambda name: name.endswith('.' + self.name))

        # Module dependencies
        for dep in self.dependencies:
            # Dependency names look like "Name-Version"; keep only the name part for the hidden import.
            dep_module, _ = dep.rsplit('-', 1)
            hiddenimports += [f'gi.repository.{dep_module}']

        return binaries, datas, hiddenimports
|
||||
|
||||
|
||||
# The old function, provided for backwards compatibility in 3rd party hooks.
def get_gi_libdir(module, version):
    """
    Return the directory containing the first resolvable shared library of the given GI module
    (see :meth:`GiModuleInfo.get_libdir`).
    """
    return GiModuleInfo(module, version).get_libdir()
|
||||
|
||||
|
||||
# The old function, provided for backwards compatibility in 3rd party hooks.
def get_gi_typelibs(module, version):
    """
    Return a tuple of (binaries, datas, hiddenimports) to be used by PyGObject related hooks. Searches for and adds
    dependencies recursively.

    :param module: GI module name, as passed to 'gi.require_version()'
    :param version: GI module version, as passed to 'gi.require_version()'
    """
    return GiModuleInfo(module, version).collect_typelib_data()
|
||||
|
||||
|
||||
def gir_library_path_fix(path):
    """
    Prepare a GI typelib file for collection, returning a ``(source_path, 'gi_typelibs')`` TOC tuple or ``None``.

    On macOS, the corresponding ``.gir`` file is located, its ``shared-library`` references are rewritten to
    ``@loader_path/...``, and the result is recompiled with ``g-ir-compiler`` into the build's workpath; the
    recompiled typelib is returned. On all other platforms, the original typelib path is returned unchanged.
    Returns ``None`` (macOS only) if the ``.gir`` directory or file cannot be found.
    """
    import subprocess

    # 'PyInstaller.config' cannot be imported as other top-level modules.
    from PyInstaller.config import CONF

    path = os.path.abspath(path)

    # On macOS we need to recompile the GIR files to reference the loader path,
    # but this is not necessary on other platforms.
    if compat.is_darwin:

        # If using a virtualenv, the base prefix and the path of the typelib
        # have really nothing to do with each other, so try to detect that.
        common_path = os.path.commonprefix([compat.base_prefix, path])
        if common_path == '/':
            logger.debug("virtualenv detected? fixing the gir path...")
            # Assume the typelib lives at <prefix>/lib/girepository-1.0/<name>.typelib and walk three levels up
            # to recover the installation prefix.
            common_path = os.path.abspath(os.path.join(path, '..', '..', '..'))

        gir_path = os.path.join(common_path, 'share', 'gir-1.0')

        # Derive the .gir source name from the .typelib name (same base name, different suffix).
        typelib_name = os.path.basename(path)
        gir_name = os.path.splitext(typelib_name)[0] + '.gir'

        gir_file = os.path.join(gir_path, gir_name)

        if not os.path.exists(gir_path):
            logger.error(
                "Unable to find gir directory: %s.\nTry installing your platform's gobject-introspection package.",
                gir_path
            )
            return None
        if not os.path.exists(gir_file):
            logger.error(
                "Unable to find gir file: %s.\nTry installing your platform's gobject-introspection package.", gir_file
            )
            return None

        with open(gir_file, 'r', encoding='utf-8') as f:
            lines = f.readlines()
        # GIR files are `XML encoded <https://developer.gnome.org/gi/stable/gi-gir-reference.html>`_,
        # which means they are by definition encoded using UTF-8.
        with open(os.path.join(CONF['workpath'], gir_name), 'w', encoding='utf-8') as f:
            for line in lines:
                if 'shared-library' in line:
                    # Split the attribute into name ('shared-library'), '=', and the quoted value.
                    split = re.split('(=)', line)
                    # NOTE(review): the character class `["|,]` matches literal `"`, `|`, and `,` — the `|` is not
                    # an alternation here, so library names containing `|` would also be split. Presumably such
                    # names never occur in practice; confirm before changing.
                    files = re.split('(["|,])', split[2])
                    for count, item in enumerate(files):
                        # Rewrite each library reference to be resolved relative to the loading binary.
                        if 'lib' in item:
                            files[count] = '@loader_path/' + os.path.basename(item)
                    line = ''.join(split[0:2]) + ''.join(files)
                f.write(line)

        # g-ir-compiler expects a file so we cannot just pipe the fixed file to it.
        command = subprocess.Popen((
            'g-ir-compiler', os.path.join(CONF['workpath'], gir_name),
            '-o', os.path.join(CONF['workpath'], typelib_name)
        )) # yapf: disable
        # NOTE(review): the compiler's exit status is not checked — a failed compilation would still return the
        # (possibly missing/stale) output path below. Confirm whether failures should be reported here.
        command.wait()

        return os.path.join(CONF['workpath'], typelib_name), 'gi_typelibs'
    else:
        return path, 'gi_typelibs'
|
||||
|
||||
|
||||
@isolated.decorate
def get_glib_system_data_dirs():
    """
    Return GLib's system data directories, as reported by ``GLib.get_system_data_dirs()``.

    Executed in an isolated subprocess via the ``isolated.decorate`` decorator, so the ``gi``
    import below happens in that subprocess rather than in the calling process.
    """
    import gi
    # The version must be requested before the module is imported from gi.repository.
    gi.require_version('GLib', '2.0')
    from gi.repository import GLib
    return GLib.get_system_data_dirs()
|
||||
|
||||
|
||||
def get_glib_sysconf_dirs():
    """
    Try to return the sysconf directories (e.g., /etc).

    On Windows, a single directory derived from the GLib library location is returned; on other
    platforms, the list reported by ``GLib.get_system_config_dirs()`` is returned.
    """
    if compat.is_win:
        # On Windows, if you look at gtkwin32.c, sysconfdir is actually relative to the location of the GTK DLL. Since
        # that is what we are actually interested in (not the user path), we have to do that the hard way...
        return [os.path.join(get_gi_libdir('GLib', '2.0'), 'etc')]

    # `isolated.call` runs the decorated function in an isolated subprocess immediately, and binds its
    # return value (not the function) to the `data_dirs` name.
    @isolated.call
    def data_dirs():
        import gi
        # The version must be requested before the module is imported from gi.repository.
        gi.require_version('GLib', '2.0')
        from gi.repository import GLib
        return GLib.get_system_config_dirs()

    return data_dirs
|
||||
|
||||
|
||||
def collect_glib_share_files(*path):
    """
    Collect data files from the GLib system data directories.

    The given *path* components are interpreted relative to each system data directory
    (e.g., /usr/share); matching files are collected into the corresponding 'share'
    sub-directory of the frozen application.
    """
    data_dirs = get_glib_system_data_dirs()
    if data_dirs is None:
        return []

    dest_dir = os.path.join('share', *path)

    # TODO: will this return too much?
    collected_files = []
    for base_dir in data_dirs:
        source_dir = os.path.join(base_dir, *path)
        collected_files.extend(collect_system_data_files(source_dir, destdir=dest_dir, include_py_files=False))

    return collected_files
|
||||
|
||||
|
||||
def collect_glib_etc_files(*path):
    """
    Collect data files from the GLib system configuration directories.

    The given *path* components are interpreted relative to each system config directory
    (e.g., /etc); matching files are collected into the corresponding 'etc' sub-directory
    of the frozen application.
    """
    config_dirs = get_glib_sysconf_dirs()
    if config_dirs is None:
        return []

    dest_dir = os.path.join('etc', *path)

    # TODO: will this return too much?
    collected_files = []
    for base_dir in config_dirs:
        source_dir = os.path.join(base_dir, *path)
        collected_files.extend(collect_system_data_files(source_dir, destdir=dest_dir, include_py_files=False))

    return collected_files
|
||||
|
||||
|
||||
# Module-level cache of collected locale files, populated on the first call below.
_glib_translations = None


def collect_glib_translations(prog, lang_list=None):
    """
    Return a list of translations in the system locale directory whose names equal prog.mo.

    :param prog: base name of the translation catalog to match (e.g., 'gtk30').
    :param lang_list: optional list of language codes to restrict collection to; honored
        only on the first call, since results are cached at module level.
    """
    global _glib_translations

    # Populate the shared cache on first use; later calls reuse it as-is.
    if _glib_translations is None:
        if lang_list is None:
            _glib_translations = collect_glib_share_files('locale')
        else:
            entries = []
            for language in lang_list:
                entries += collect_glib_share_files(os.path.join("locale", language))
            _glib_translations = entries

    # Both suffixes have equal length, so a single tail-slice length suffices.
    suffixes = (os.sep + prog + '.mo', os.sep + prog + '.po')
    tail_len = len(suffixes[0])

    return [(src, dst) for src, dst in _glib_translations if src[-tail_len:] in suffixes]
|
||||
|
||||
|
||||
# Not a hook utility function per-se (used by main Analysis class), but kept here to have all GLib/GObject functions
|
||||
# in one place...
|
||||
def compile_glib_schema_files(datas_toc, workdir, collect_source_files=False):
    """
    Compile collected GLib schema files. Extracts the list of GLib schema files from the given input datas TOC, copies
    them to temporary working directory, and compiles them. The resulting `gschemas.compiled` file is added to the
    output TOC, replacing any existing entry with that name. If `collect_source_files` flag is set, the source XML
    schema files are also (re)added to the output TOC; by default, they are not. This function is no-op (returns the
    original TOC) if no GLib schemas are found in TOC or if `glib-compile-schemas` executable is not found in `PATH`.

    :param datas_toc: input datas TOC; an iterable of ``(dest_name, src_name, typecode)`` tuples.
    :param workdir: temporary working directory into which schema files are copied and compiled. Wiped and recreated.
    :param collect_source_files: whether to also include the source schema files in the output TOC.
    :return: new TOC list, or the original ``datas_toc`` in the no-op/failure cases described above.
    """
    SCHEMA_DEST_DIR = pathlib.PurePath("share/glib-2.0/schemas")
    workdir = pathlib.Path(workdir)

    schema_files = []
    output_toc = []
    for toc_entry in datas_toc:
        dest_name, src_name, typecode = toc_entry
        dest_name = pathlib.PurePath(dest_name)
        src_name = pathlib.PurePath(src_name)

        # Pass-through for non-schema files, identified based on the destination directory.
        if dest_name.parent != SCHEMA_DEST_DIR:
            output_toc.append(toc_entry)
            continue

        # It seems schemas directory contains different files with different suffices:
        #  - .gschema.xml
        #  - .schema.override
        #  - .enums.xml
        # To avoid omitting anything, simply collect everything into temporary directory.
        # Exemptions are gschema.dtd (which should be unnecessary) and gschemas.compiled (which we will generate
        # ourselves in this function).
        if src_name.name in {"gschema.dtd", "gschemas.compiled"}:
            continue

        schema_files.append(src_name)

    # If there are no schema files available, simply return the input datas TOC.
    if not schema_files:
        return datas_toc

    # Ensure that `glib-compile-schemas` executable is in PATH, just in case...
    schema_compiler_exe = shutil.which('glib-compile-schemas')
    if not schema_compiler_exe:
        logger.warning("GLib schema compiler (glib-compile-schemas) not found! Skipping GLib schema recompilation...")
        return datas_toc

    # If `gschemas.compiled` file already exists in the temporary working directory, record its modification time and
    # hash. This will allow us to restore the modification time on the newly-compiled copy, if the latter turns out
    # to be identical to the existing old one. Just in case, if the file becomes subject to timestamp-based caching
    # mechanism.
    compiled_file = workdir / "gschemas.compiled"
    old_compiled_file_hash = None
    old_compiled_file_stat = None

    if compiled_file.is_file():
        # Record creation/modification time
        old_compiled_file_stat = compiled_file.stat()
        # Compute SHA1 hash; since compiled schema files are relatively small, do it in single step.
        old_compiled_file_hash = hashlib.sha1(compiled_file.read_bytes()).digest()

    # Ensure that temporary working directory exists, and is empty.
    if workdir.exists():
        shutil.rmtree(workdir)
    # NOTE(review): `mkdir(exist_ok=True)` assumes the parent directory already exists — confirm whether
    # `parents=True` is needed when `workdir` is created for the first time in a fresh build.
    workdir.mkdir(exist_ok=True)

    # Copy schema (source) files to temporary working directory
    for schema_file in schema_files:
        shutil.copy(schema_file, workdir)

    # Compile. The glib-compile-schema might produce warnings on its own (e.g., schemas using deprecated paths, or
    # overrides for non-existent keys). Since these are non-actionable, capture and display them only as a DEBUG
    # message, or as a WARNING one if the command fails.
    logger.info("Compiling collected GLib schema files in %r...", str(workdir))
    try:
        cmd_args = [schema_compiler_exe, str(workdir), '--targetdir', str(workdir)]
        p = subprocess.run(
            cmd_args,
            stdin=subprocess.DEVNULL,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            check=True,
            errors='ignore',
            encoding='utf-8',
        )
        logger.debug("Output from glib-compile-schemas:\n%s", p.stdout)
    except subprocess.CalledProcessError as e:
        # The called glib-compile-schema returned error. Display stdout/stderr, and return original datas TOC to
        # minimize damage.
        logger.warning("Failed to recompile GLib schemas! Returning collected files as-is!", exc_info=True)
        logger.warning("Output from glib-compile-schemas:\n%s", e.stdout)
        return datas_toc
    except Exception:
        # Compilation failed for whatever reason. Return original datas TOC to minimize damage.
        logger.warning("Failed to recompile GLib schemas! Returning collected files as-is!", exc_info=True)
        return datas_toc

    # Compute the checksum of the new compiled file, and if it matches the old checksum, restore the modification time.
    if old_compiled_file_hash is not None:
        new_compiled_file_hash = hashlib.sha1(compiled_file.read_bytes()).digest()
        if new_compiled_file_hash == old_compiled_file_hash:
            os.utime(compiled_file, ns=(old_compiled_file_stat.st_atime_ns, old_compiled_file_stat.st_mtime_ns))

    # Add the resulting gschemas.compiled file to the output TOC
    output_toc.append((str(SCHEMA_DEST_DIR / compiled_file.name), str(compiled_file), "DATA"))

    # Include source schema files in the output TOC (optional)
    if collect_source_files:
        for schema_file in schema_files:
            output_toc.append((str(SCHEMA_DEST_DIR / schema_file.name), str(schema_file), "DATA"))

    return output_toc
|
||||
1427
venv/lib/python3.12/site-packages/PyInstaller/utils/hooks/qt/__init__.py
Executable file
1427
venv/lib/python3.12/site-packages/PyInstaller/utils/hooks/qt/__init__.py
Executable file
File diff suppressed because it is too large
Load Diff
450
venv/lib/python3.12/site-packages/PyInstaller/utils/hooks/qt/_modules_info.py
Executable file
450
venv/lib/python3.12/site-packages/PyInstaller/utils/hooks/qt/_modules_info.py
Executable file
@@ -0,0 +1,450 @@
|
||||
# ----------------------------------------------------------------------------
|
||||
# Copyright (c) 2022-2023, PyInstaller Development Team.
|
||||
#
|
||||
# Distributed under the terms of the GNU General Public License (version 2
|
||||
# or later) with exception for distributing the bootloader.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#
|
||||
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
# Qt modules information - the core of our Qt collection approach
|
||||
# ----------------------------------------------------------------
|
||||
#
|
||||
# The python bindings for Qt (``PySide2``, ``PyQt5``, ``PySide6``, ``PyQt6``) consist of several python binary extension
|
||||
# modules that provide bindings for corresponding Qt modules. For example, the ``PySide2.QtNetwork`` python extension
|
||||
# module provides bindings for the ``QtNetwork`` Qt module from the ``qt/qtbase`` Qt repository.
|
||||
#
|
||||
# A Qt module can be considered as consisting of:
|
||||
# * a shared library (for example, on Linux, the shared library names for the ``QtNetwork`` Qt module in Qt5 and Qt6
|
||||
# are ``libQt5Network.so`` and ``libQt6Network.so``, respectively).
|
||||
# * plugins: a certain type (or class) of plugins is usually associated with a single Qt module (for example,
|
||||
# ``imageformats`` plugins are associated with the ``QtGui`` Qt module from the ``qt/qtbase`` Qt repository), but
|
||||
# additional plugins of that type may come from other Qt repositories. For example, ``imageformats/qsvg`` plugin
|
||||
# is provided by ``qtsvg/src/plugins/imageformats/svg`` from the ``qt/qtsvg`` repository, and ``imageformats/qpdf``
|
||||
# is provided by ``qtwebengine/src/pdf/plugins/imageformats/pdf`` from the ``qt/qtwebengine`` repository.
|
||||
# * translation files: names of translation files consist of a base name, which typically corresponds to the Qt
|
||||
# repository name, and language code. A single translation file usually covers all Qt modules contained within
|
||||
# the same repository. For example, translation files with base name ``qtbase`` contain translations for ``QtCore``,
|
||||
# ``QtGui``, ``QtWidgets``, ``QtNetwork``, and other Qt modules from the ``qt/qtbase`` Qt repository.
|
||||
#
|
||||
# The PyInstaller's built-in analysis of link-time dependencies ensures that when collecting a Qt python extension
|
||||
# module, we automatically pick up the linked Qt shared libraries. However, collection of linked Qt shared libraries
|
||||
# does not result in collection of plugins, nor translation files. In addition, the dependency of a Qt python extension
|
||||
# module on other Qt python extension modules (i.e., at the bindings level) cannot be automatically determined due to
|
||||
# PyInstaller's inability to scan imports in binary extensions.
|
||||
#
|
||||
# PyInstaller < 5.7 solved this problem using a dictionary that associated a Qt shared library name with python
|
||||
# extension name, plugins, and translation files. For each hooked Qt python extension module, the hook calls a helper
|
||||
# that analyzes the extension file for link-time dependencies, and matches those against the dictionary. Therefore,
|
||||
# based on linked shared libraries, we could recursively infer the list of files to collect in addition to the shared
|
||||
# libraries themselves:
|
||||
# - plugins and translation files belonging to Qt modules whose shared libraries we collect
|
||||
# - Qt python extension modules corresponding to the Qt modules that we collect
|
||||
#
|
||||
# The above approach ensures that even if analyzed python script contains only ``from PySide2 import QtWidgets``,
|
||||
# we would also collect ``PySide2.QtGui`` and ``PySide2.QtCore``, as well as all corresponding Qt module files
|
||||
# (the shared libraries, plugins, translation files). For this to work, a hook must be provided for the
|
||||
# ``PySide2.QtWidgets`` that performs the recursive analysis of the extension module file; so to ensure that each
|
||||
# Qt python extension module by itself ensures collection of all its dependencies, we need to hook all Qt python
|
||||
# extension modules provided by specific python Qt bindings package.
|
||||
#
|
||||
# The above approach with single dictionary, however, has several limitations:
|
||||
# - it cannot provide association for Qt python module that binds a Qt module without a shared library (i.e., a
|
||||
# headers-only module, or a statically-built module). In such cases, potential plugins and translations should
|
||||
# be associated directly with the Qt python extension file instead of the Qt module's (non-existent) shared library.
|
||||
# - it cannot (directly) handle differences between Qt5 and Qt6; we had to build a second dictionary
|
||||
# - it cannot handle differences between the bindings themselves; for example, PyQt5 binds some Qt modules that
|
||||
# PySide2 does not bind. Or, the binding's Qt python extension module is named differently in PyQt and PySide
|
||||
# bindings (or just differently in PyQt5, while PySide2, PySide6, and PyQt6 use the same name).
|
||||
#
|
||||
# In order to address the above shortcomings, we now store all information in a list of structures that contain information
|
||||
# for a particular Qt python extension and/or Qt module (shared library):
|
||||
# - python extension name (if applicable)
|
||||
# - Qt module name base (if applicable)
|
||||
# - plugins
|
||||
# - translation files base name
|
||||
# - applicable Qt version (if necessary)
|
||||
# - applicable Qt bindings (if necessary)
|
||||
#
|
||||
# This list is used to dynamically construct two dictionaries (based on the bindings name and Qt version):
|
||||
# - mapping python extension names to associated module information
|
||||
# - mapping Qt shared library names to associated module information
|
||||
# This allows us to associate plugins and translations with either Qt python extension or with the Qt module's shared
|
||||
# library (or both), whichever is applicable.
|
||||
#
|
||||
# The `qt_dynamic_dependencies_dict`_ from the original approach was constructed using several information sources, as
|
||||
# documented `here
|
||||
# <https://github.com/pyinstaller/pyinstaller/blob/fbf7948be85177dd44b41217e9f039e1d176de6b/PyInstaller/utils/hooks/qt.py#L266-L362>`_.
|
||||
#
|
||||
# In the current approach, the relations stored in the `QT_MODULES_INFO`_ list were determined directly, by inspecting
|
||||
# the Qt source code. This requires some prior knowledge of how the Qt code is organized (repositories and individual Qt
|
||||
# modules within them), as well as some searching based on guesswork. The procedure can be outlined as follows:
|
||||
# * check out the `main Qt repository <git://code.qt.io/qt/qt5.git>`_. This repository contains references to all other
|
||||
# Qt repositories in the form of git submodules.
|
||||
# * for Qt5:
|
||||
# * check out the latest release tag, e.g., v5.15.2, then check out the submodules.
|
||||
# * search the Qt modules' qmake .pro files; for example, ``qtbase/src/network/network.pro`` for QtNetwork module.
|
||||
# The plugin types associated with the module are listed in the ``MODULE_PLUGIN_TYPES`` variable (in this case,
|
||||
# ``bearer``).
|
||||
# * all translations are gathered in ``qttranslations`` sub-module/repository, and their association with
|
||||
# individual repositories can be seen in ``qttranslations/translations/translations.pro``.
|
||||
# * for Qt6:
|
||||
# * check out the latest release tag, e.g., v6.3.1, then check out the submodules.
|
||||
# * search the Qt modules' CMake files; for example, ``qtbase/src/network/CMakeLists.txt`` for QtNetwork module.
|
||||
# The plugin types associated with the module are listed under ``PLUGIN_TYPES`` argument of the
|
||||
# ``qt_internal_add_module()`` function that defines the Qt module.
|
||||
#
|
||||
# The idea is to make a list of all extension modules found in a Qt bindings package, as well as all available plugin
|
||||
# directories (which correspond to plugin types) and translation files. For each extension, identify the corresponding
|
||||
# Qt module (shared library name) and its associated plugins and translation files. Once this is done, most of available
|
||||
# plugins and translations in the python bindings package should have a corresponding python Qt extension module
|
||||
# available; this gives us associations based on the python extension module names as well as based on the Qt shared
|
||||
# library names. For any plugins and translation files remaining unassociated, identify the corresponding Qt module;
|
||||
# this gives us associations based only on Qt shared library names. While this second group of associations are never
|
||||
# processed directly (due to lack of corresponding python extension), they may end up being processed during the
|
||||
# recursive dependency analysis, if the corresponding Qt shared library is linked against by some Qt python extension
|
||||
# or another Qt shared library.
|
||||
|
||||
|
||||
# This structure is used to define Qt module information, such as python module/extension name, Qt module (shared
|
||||
# library) name, translation files' base names, plugins, as well as associated python bindings (which implicitly
|
||||
# also encode major Qt version).
|
||||
class _QtModuleDef:
|
||||
def __init__(self, module, shared_lib=None, translations=None, plugins=None, bindings=None):
|
||||
# Python module (extension) name without package namespace. For example, `QtCore`.
|
||||
# Can be None if python bindings do not bind the module, but we still need to establish relationship between
|
||||
# the Qt module (shared library) and its plugins and translations.
|
||||
self.module = module
|
||||
# Associated Qt module (shared library), if any. Used during recursive dependency analysis, where a python
|
||||
# module (extension) is analyzed for linked Qt modules (shared libraries), and then their corresponding
|
||||
# python modules (extensions) are added to hidden imports. For example, the Qt module name is `Qt5Core` or
|
||||
# `Qt6Core`, depending on the Qt version. Can be None for python modules that are not tied to a particular
|
||||
# Qt shared library (for example, the corresponding Qt module is headers-only) and hence they cannot be
|
||||
# inferred from recursive link-time dependency analysis.
|
||||
self.shared_lib = shared_lib
|
||||
# List of base names of translation files (if any) associated with the Qt module. Multiple base names may be
|
||||
# associated with a single module.
|
||||
# For example, `['qt', 'qtbase']` for `QtCore` or `['qtmultimedia']` for `QtMultimedia`.
|
||||
self.translations = translations or []
|
||||
# List of plugins associated with the Qt module.
|
||||
self.plugins = plugins or []
|
||||
# List of bindings (PySide2, PyQt5, PySide6, PyQt6) that provide the python module. This allows association of
|
||||
# plugins and translations with shared libraries even for bindings that do not provide python module binding
|
||||
# for the Qt module.
|
||||
self.bindings = set(bindings or [])
|
||||
|
||||
|
||||
# All Qt-based python bindings handled by these hook utilities.
ALL_QT_BINDINGS = {"PySide2", "PyQt5", "PySide6", "PyQt6"}
|
||||
|
||||
# Qt modules information - the core of our Qt collection approach.
|
||||
#
|
||||
# For every python module/extension (i.e., entry in the list below that has valid `module`), we need a corresponding
|
||||
# hook, ensuring that the extension file is analyzed, so that we collect the associated plugins and translation
|
||||
# files, as well as perform recursive analysis of link-time binary dependencies (so that plugins and translation files
|
||||
# belonging to those dependencies are collected as well).
|
||||
QT_MODULES_INFO = (
|
||||
# *** qt/qt3d ***
|
||||
_QtModuleDef("Qt3DAnimation", shared_lib="3DAnimation"),
|
||||
_QtModuleDef("Qt3DCore", shared_lib="3DCore"),
|
||||
_QtModuleDef("Qt3DExtras", shared_lib="3DExtras"),
|
||||
_QtModuleDef("Qt3DInput", shared_lib="3DInput", plugins=["3dinputdevices"]),
|
||||
_QtModuleDef("Qt3DLogic", shared_lib="3DLogic"),
|
||||
_QtModuleDef(
|
||||
"Qt3DRender", shared_lib="3DRender", plugins=["geometryloaders", "renderplugins", "renderers", "sceneparsers"]
|
||||
),
|
||||
|
||||
# *** qt/qtactiveqt ***
|
||||
# The python module is called QAxContainer in PyQt bindings, but QtAxContainer in PySide. The associated Qt module
|
||||
# is header-only, so there is no shared library.
|
||||
_QtModuleDef("QAxContainer", bindings=["PyQt*"]),
|
||||
_QtModuleDef("QtAxContainer", bindings=["PySide*"]),
|
||||
|
||||
# *** qt/qtcharts ***
|
||||
# The python module is called QtChart in PyQt5, and QtCharts in PySide2, PySide6, and PyQt6 (which corresponds to
|
||||
# the associated Qt module name, QtCharts).
|
||||
_QtModuleDef("QtChart", shared_lib="Charts", bindings=["PyQt5"]),
|
||||
_QtModuleDef("QtCharts", shared_lib="Charts", bindings=["!PyQt5"]),
|
||||
|
||||
# *** qt/qtbase ***
|
||||
# QtConcurrent python module is available only in PySide bindings.
|
||||
_QtModuleDef(None, shared_lib="Concurrent", bindings=["PyQt*"]),
|
||||
_QtModuleDef("QtConcurrent", shared_lib="Concurrent", bindings=["PySide*"]),
|
||||
_QtModuleDef("QtCore", shared_lib="Core", translations=["qt", "qtbase"]),
|
||||
# QtDBus python module is available in all bindings but PySide2.
|
||||
_QtModuleDef(None, shared_lib="DBus", bindings=["PySide2"]),
|
||||
_QtModuleDef("QtDBus", shared_lib="DBus", bindings=["!PySide2"]),
|
||||
# QtNetwork uses different plugins in Qt5 and Qt6.
|
||||
_QtModuleDef("QtNetwork", shared_lib="Network", plugins=["bearer"], bindings=["PySide2", "PyQt5"]),
|
||||
_QtModuleDef(
|
||||
"QtNetwork",
|
||||
shared_lib="Network",
|
||||
plugins=["networkaccess", "networkinformation", "tls"],
|
||||
bindings=["PySide6", "PyQt6"]
|
||||
),
|
||||
_QtModuleDef(
|
||||
"QtGui",
|
||||
shared_lib="Gui",
|
||||
plugins=[
|
||||
"accessiblebridge",
|
||||
"egldeviceintegrations",
|
||||
"generic",
|
||||
"iconengines",
|
||||
"imageformats",
|
||||
"platforms",
|
||||
"platforms/darwin",
|
||||
"platforminputcontexts",
|
||||
"platformthemes",
|
||||
"xcbglintegrations",
|
||||
# The ``wayland-*`` plugins are part of QtWaylandClient Qt module, whose shared library
|
||||
# (e.g., libQt5WaylandClient.so) is linked by the wayland-related ``platforms`` plugins. Ideally, we would
|
||||
# collect these plugins based on the QtWaylandClient shared library entry, but as our Qt hook utilities do
|
||||
# not scan the plugins for dependencies, that would not work. So instead we list these plugins under QtGui
|
||||
# to achieve pretty much the same end result.
|
||||
"wayland-decoration-client",
|
||||
"wayland-graphics-integration-client",
|
||||
"wayland-shell-integration"
|
||||
]
|
||||
),
|
||||
_QtModuleDef("QtOpenGL", shared_lib="OpenGL"),
|
||||
# This python module is specific to PySide2 and has no associated Qt module.
|
||||
_QtModuleDef("QtOpenGLFunctions", bindings=["PySide2"]),
|
||||
# This Qt module was introduced with Qt6.
|
||||
_QtModuleDef("QtOpenGLWidgets", shared_lib="OpenGLWidgets", bindings=["PySide6", "PyQt6"]),
|
||||
_QtModuleDef("QtPrintSupport", shared_lib="PrintSupport", plugins=["printsupport"]),
|
||||
_QtModuleDef("QtSql", shared_lib="Sql", plugins=["sqldrivers"]),
|
||||
_QtModuleDef("QtTest", shared_lib="Test"),
|
||||
_QtModuleDef("QtWidgets", shared_lib="Widgets", plugins=["styles"]),
|
||||
_QtModuleDef("QtXml", shared_lib="Xml"),
|
||||
|
||||
# *** qt/qtconnectivity ***
|
||||
_QtModuleDef("QtBluetooth", shared_lib="QtBluetooth", translations=["qtconnectivity"]),
|
||||
_QtModuleDef("QtNfc", shared_lib="Nfc", translations=["qtconnectivity"]),
|
||||
|
||||
# *** qt/qtdatavis3d ***
|
||||
_QtModuleDef("QtDataVisualization", shared_lib="DataVisualization"),
|
||||
|
||||
# *** qt/qtdeclarative ***
|
||||
_QtModuleDef("QtQml", shared_lib="Qml", translations=["qtdeclarative"], plugins=["qmltooling"]),
|
||||
# Have the Qt5 variant collect translations for qtquickcontrols (qt/qtquickcontrols provides only QtQuick plugins).
|
||||
_QtModuleDef(
|
||||
"QtQuick",
|
||||
shared_lib="Quick",
|
||||
translations=["qtquickcontrols"],
|
||||
plugins=["scenegraph"],
|
||||
bindings=["PySide2", "PyQt5"]
|
||||
),
|
||||
_QtModuleDef("QtQuick", shared_lib="Quick", plugins=["scenegraph"], bindings=["PySide6", "PyQt6"]),
|
||||
# Qt6-only; in Qt5, this module is part of qt/qtquickcontrols2. Python module is available only in PySide6.
|
||||
_QtModuleDef(None, shared_lib="QuickControls2", bindings=["PyQt6"]),
|
||||
_QtModuleDef("QtQuickControls2", shared_lib="QuickControls2", bindings=["PySide6"]),
|
||||
_QtModuleDef("QtQuickWidgets", shared_lib="QuickWidgets"),
|
||||
|
||||
# *** qt/qtgamepad ***
|
||||
# No python module; shared library -> plugins association entry.
|
||||
_QtModuleDef(None, shared_lib="Gamepad", plugins=["gamepads"]),
|
||||
|
||||
# *** qt/qtgraphs ***
|
||||
# Qt6 >= 6.6.0; python module is available only in PySide6.
|
||||
_QtModuleDef("QtGraphs", shared_lib="Graphs", bindings=["PySide6"]),
|
||||
|
||||
# *** qt/qthttpserver ***
|
||||
# Qt6 >= 6.4.0; python module is available only in PySide6.
|
||||
_QtModuleDef("QtHttpServer", shared_lib="HttpServer", bindings=["PySide6"]),
|
||||
|
||||
# *** qt/qtlocation ***
|
||||
# QtLocation was reintroduced in Qt6 v6.5.0.
|
||||
_QtModuleDef(
|
||||
"QtLocation",
|
||||
shared_lib="Location",
|
||||
translations=["qtlocation"],
|
||||
plugins=["geoservices"],
|
||||
bindings=["PySide2", "PyQt5", "PySide6"]
|
||||
),
|
||||
_QtModuleDef(
|
||||
"QtPositioning",
|
||||
shared_lib="Positioning",
|
||||
translations=["qtlocation"],
|
||||
plugins=["position"],
|
||||
),
|
||||
|
||||
# *** qt/qtmacextras ***
|
||||
# Qt5-only Qt module.
|
||||
_QtModuleDef("QtMacExtras", shared_lib="MacExtras", bindings=["PySide2", "PyQt5"]),
|
||||
|
||||
# *** qt/qtmultimedia ***
|
||||
# QtMultimedia on Qt6 currently uses only a subset of plugin names from Qt5 counterpart.
|
||||
_QtModuleDef(
|
||||
"QtMultimedia",
|
||||
shared_lib="Multimedia",
|
||||
translations=["qtmultimedia"],
|
||||
plugins=[
|
||||
"mediaservice", "audio", "video/bufferpool", "video/gstvideorenderer", "video/videonode", "playlistformats",
|
||||
"resourcepolicy"
|
||||
],
|
||||
bindings=["PySide2", "PyQt5"]
|
||||
),
|
||||
_QtModuleDef(
|
||||
"QtMultimedia",
|
||||
shared_lib="Multimedia",
|
||||
translations=["qtmultimedia"],
|
||||
# `multimedia` plugins are available as of Qt6 >= 6.4.0; earlier versions had `video/gstvideorenderer` and
|
||||
# `video/videonode` plugins.
|
||||
plugins=["multimedia", "video/gstvideorenderer", "video/videonode"],
|
||||
bindings=["PySide6", "PyQt6"]
|
||||
),
|
||||
_QtModuleDef("QtMultimediaWidgets", shared_lib="MultimediaWidgets"),
|
||||
# Qt6-only Qt module; python module is available in PySide6 >= 6.4.0 and PyQt6 >= 6.5.0
|
||||
_QtModuleDef("QtSpatialAudio", shared_lib="SpatialAudio", bindings=["PySide6", "PyQt6"]),
|
||||
|
||||
# *** qt/qtnetworkauth ***
|
||||
# QtNetworkAuth python module is available in all bindings but PySide2.
|
||||
_QtModuleDef(None, shared_lib="NetworkAuth", bindings=["PySide2"]),
|
||||
_QtModuleDef("QtNetworkAuth", shared_lib="NetworkAuth", bindings=["!PySide2"]),
|
||||
|
||||
# *** qt/qtpurchasing ***
|
||||
# Qt5-only Qt module, python module is available only in PyQt5.
|
||||
_QtModuleDef("QtPurchasing", shared_lib="Purchasing", bindings=["PyQt5"]),
|
||||
|
||||
# *** qt/qtquick1 ***
|
||||
# This is an old, Qt 5.3-era module...
|
||||
_QtModuleDef(
|
||||
"QtDeclarative",
|
||||
shared_lib="Declarative",
|
||||
translations=["qtquick1"],
|
||||
plugins=["qml1tooling"],
|
||||
bindings=["PySide2", "PyQt5"]
|
||||
),
|
||||
|
||||
# *** qt/qtquick3d ***
|
||||
# QtQuick3D python module is available in all bindings but PySide2.
|
||||
_QtModuleDef(None, shared_lib="Quick3D", bindings=["PySide2"]),
|
||||
_QtModuleDef("QtQuick3D", shared_lib="Quick3D", bindings=["!PySide2"]),
|
||||
# No python module; shared library -> plugins association entry.
|
||||
_QtModuleDef(None, shared_lib="Quick3DAssetImport", plugins=["assetimporters"]),
|
||||
|
||||
# *** qt/qtquickcontrols2 ***
|
||||
# Qt5-only module; in Qt6, this module is part of qt/declarative. Python module is available only in PySide2.
|
||||
_QtModuleDef(None, translations=["qtquickcontrols2"], shared_lib="QuickControls2", bindings=["PyQt5"]),
|
||||
_QtModuleDef(
|
||||
"QtQuickControls2", translations=["qtquickcontrols2"], shared_lib="QuickControls2", bindings=["PySide2"]
|
||||
),
|
||||
|
||||
# *** qt/qtremoteobjects ***
|
||||
_QtModuleDef("QtRemoteObjects", shared_lib="RemoteObjects"),
|
||||
|
||||
# *** qt/qtscxml ***
|
||||
# Python module is available only in PySide bindings. Plugins are available only in Qt6.
|
||||
# PyQt wheels do not seem to ship the corresponding Qt modules (shared libs) at all.
|
||||
_QtModuleDef("QtScxml", shared_lib="Scxml", bindings=["PySide2"]),
|
||||
_QtModuleDef("QtScxml", shared_lib="Scxml", plugins=["scxmldatamodel"], bindings=["PySide6"]),
|
||||
# Qt6-only Qt module, python module is available only in PySide6.
|
||||
_QtModuleDef("QtStateMachine", shared_lib="StateMachine", bindings=["PySide6"]),
|
||||
|
||||
# *** qt/qtsensors ***
|
||||
_QtModuleDef("QtSensors", shared_lib="Sensors", plugins=["sensors", "sensorgestures"]),
|
||||
|
||||
# *** qt/qtserialport ***
|
||||
_QtModuleDef("QtSerialPort", shared_lib="SerialPort", translations=["qtserialport"]),
|
||||
|
||||
# *** qt/qtscript ***
|
||||
# Qt5-only Qt module, python module is available only in PySide2. PyQt5 wheels do not seem to ship the corresponding
|
||||
# Qt modules (shared libs) at all.
|
||||
_QtModuleDef("QtScript", shared_lib="Script", translations=["qtscript"], plugins=["script"], bindings=["PySide2"]),
|
||||
_QtModuleDef("QtScriptTools", shared_lib="ScriptTools", bindings=["PySide2"]),
|
||||
|
||||
# *** qt/qtserialbus ***
|
||||
# No python module; shared library -> plugins association entry.
|
||||
# PySide6 6.5.0 introduced python module.
|
||||
_QtModuleDef(None, shared_lib="SerialBus", plugins=["canbus"], bindings=["!PySide6"]),
|
||||
_QtModuleDef("QtSerialBus", shared_lib="SerialBus", plugins=["canbus"], bindings=["PySide6"]),
|
||||
|
||||
# *** qt/qtsvg ***
|
||||
_QtModuleDef("QtSvg", shared_lib="Svg"),
|
||||
# Qt6-only Qt module.
|
||||
_QtModuleDef("QtSvgWidgets", shared_lib="SvgWidgets", bindings=["PySide6", "PyQt6"]),
|
||||
|
||||
# *** qt/qtspeech ***
|
||||
_QtModuleDef("QtTextToSpeech", shared_lib="TextToSpeech", plugins=["texttospeech"]),
|
||||
|
||||
# *** qt/qttools ***
|
||||
# QtDesigner python module is available in all bindings but PySide2.
|
||||
_QtModuleDef(None, shared_lib="Designer", plugins=["designer"], bindings=["PySide2"]),
|
||||
_QtModuleDef(
|
||||
"QtDesigner", shared_lib="Designer", translations=["designer"], plugins=["designer"], bindings=["!PySide2"]
|
||||
),
|
||||
_QtModuleDef("QtHelp", shared_lib="Help", translations=["qt_help"]),
|
||||
# Python module is available only in PySide bindings.
|
||||
_QtModuleDef("QtUiTools", shared_lib="UiTools", bindings=["PySide*"]),
|
||||
|
||||
# *** qt/qtvirtualkeyboard ***
|
||||
# No python module; shared library -> plugins association entry.
|
||||
_QtModuleDef(None, shared_lib="VirtualKeyboard", plugins=["virtualkeyboard"]),
|
||||
|
||||
# *** qt/qtwebchannel ***
|
||||
_QtModuleDef("QtWebChannel", shared_lib="WebChannel"),
|
||||
|
||||
# *** qt/qtwebengine ***
|
||||
# QtWebEngine is Qt5-only module (replaced by QtWebEngineQuick in Qt6).
|
||||
_QtModuleDef("QtWebEngine", shared_lib="WebEngine", bindings=["PySide2", "PyQt5"]),
|
||||
_QtModuleDef("QtWebEngineCore", shared_lib="WebEngineCore", translations=["qtwebengine"]),
|
||||
# QtWebEngineQuick is Qt6-only module (replacement for QtWebEngine in Qt5).
|
||||
_QtModuleDef("QtWebEngineQuick", shared_lib="WebEngineQuick", bindings=["PySide6", "PyQt6"]),
|
||||
_QtModuleDef("QtWebEngineWidgets", shared_lib="WebEngineWidgets"),
|
||||
# QtPdf and QtPdfWidgets have python module available in PySide6 and PyQt6 >= 6.4.0.
|
||||
_QtModuleDef("QtPdf", shared_lib="Pdf", bindings=["PySide6", "PyQt6"]),
|
||||
_QtModuleDef("QtPdfWidgets", shared_lib="PdfWidgets", bindings=["PySide6", "PyQt6"]),
|
||||
|
||||
# *** qt/qtwebsockets ***
|
||||
_QtModuleDef("QtWebSockets", shared_lib="WebSockets", translations=["qtwebsockets"]),
|
||||
|
||||
# *** qt/qtwebview ***
|
||||
# No python module; shared library -> plugins association entry.
|
||||
_QtModuleDef(None, shared_lib="WebView", plugins=["webview"]),
|
||||
|
||||
# *** qt/qtwinextras ***
|
||||
# Qt5-only Qt module.
|
||||
_QtModuleDef("QtWinExtras", shared_lib="WinExtras", bindings=["PySide2", "PyQt5"]),
|
||||
|
||||
# *** qt/qtx11extras ***
|
||||
# Qt5-only Qt module.
|
||||
_QtModuleDef("QtX11Extras", shared_lib="X11Extras", bindings=["PySide2", "PyQt5"]),
|
||||
|
||||
# *** qt/qtxmlpatterns ***
|
||||
# Qt5-only Qt module.
|
||||
_QtModuleDef(
|
||||
"QtXmlPatterns", shared_lib="XmlPatterns", translations=["qtxmlpatterns"], bindings=["PySide2", "PyQt5"]
|
||||
),
|
||||
|
||||
# *** qscintilla ***
|
||||
# Python module is available only in PyQt bindings. No associated shared library.
|
||||
_QtModuleDef("Qsci", translations=["qscintilla"], bindings=["PyQt*"]),
|
||||
)
|
||||
|
||||
|
||||
# Helpers for turning Qt namespace specifiers, such as "!PySide2" or "PyQt*", into set of applicable
|
||||
# namespaces.
|
||||
def process_namespace_strings(namespaces):
    """
    Process a list of Qt namespace specifier strings into a set of namespaces.

    :param namespaces: iterable of specifier strings, e.g. "PySide2", "!PySide2", "PySide*", "PyQt*".
    :return: set of binding namespace names (e.g., {"PySide2", "PyQt5"}).
    :raises ValueError: propagated from `_process_namespace_string` for unrecognized specifiers.
    """
    # NOTE: the original docstring was opened with four quotes (""""Process...), which leaked a stray
    # quote character into the docstring text; fixed here.
    bindings = set()
    for namespace in namespaces:
        # Union the expansion of each individual specifier.
        bindings |= _process_namespace_string(namespace)
    return bindings
|
||||
|
||||
|
||||
def _process_namespace_string(namespace):
|
||||
"""Expand a Qt namespace specifier string into set of namespaces."""
|
||||
if namespace.startswith("!"):
|
||||
bindings = _process_namespace_string(namespace[1:])
|
||||
return ALL_QT_BINDINGS - bindings
|
||||
else:
|
||||
if namespace == "PySide*":
|
||||
return {"PySide2", "PySide6"}
|
||||
elif namespace == "PyQt*":
|
||||
return {"PyQt5", "PyQt6"}
|
||||
elif namespace in ALL_QT_BINDINGS:
|
||||
return {namespace}
|
||||
else:
|
||||
raise ValueError(f"Invalid Qt namespace specifier: {namespace}!")
|
||||
336
venv/lib/python3.12/site-packages/PyInstaller/utils/hooks/setuptools.py
Executable file
336
venv/lib/python3.12/site-packages/PyInstaller/utils/hooks/setuptools.py
Executable file
@@ -0,0 +1,336 @@
|
||||
# ----------------------------------------------------------------------------
|
||||
# Copyright (c) 2024, PyInstaller Development Team.
|
||||
#
|
||||
# Distributed under the terms of the GNU General Public License (version 2
|
||||
# or later) with exception for distributing the bootloader.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#
|
||||
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
|
||||
#-----------------------------------------------------------------------------
|
||||
from PyInstaller import log as logging
|
||||
from PyInstaller import isolated
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Import setuptools and analyze its properties in an isolated subprocess. This function is called by `SetuptoolsInfo`
# to initialize its properties.
@isolated.decorate
def _retrieve_setuptools_info():
    """
    Analyze the setuptools installation in an isolated subprocess.

    Returns None when setuptools cannot be imported. Otherwise returns a dict with keys: available, version,
    distutils_vendored, distutils_modules, vendored_status, vendored_modules, vendored_data, and
    vendored_namespace_package_paths.
    """
    import importlib

    try:
        setuptools = importlib.import_module("setuptools")  # noqa: F841
    except ModuleNotFoundError:
        return None

    # Delay these imports until after we have confirmed that setuptools is importable.
    import pathlib

    import packaging.version

    from PyInstaller.compat import importlib_metadata
    from PyInstaller.utils.hooks import (
        collect_data_files,
        collect_submodules,
    )

    # Try to retrieve the version. At this point, failure is considered an error.
    version_string = importlib_metadata.version("setuptools")
    version = packaging.version.Version(version_string).release  # Use the version tuple

    # setuptools >= 60.0 ships its vendored copy of distutils (mainly due to its removal from stdlib in
    # python >= 3.12).
    distutils_vendored = False
    distutils_modules = []
    if version >= (60, 0):
        distutils_vendored = True
        distutils_modules += ["_distutils_hack"]
        distutils_modules += collect_submodules(
            "setuptools._distutils",
            # setuptools 71.0.1 ~ 71.0.4 include `setuptools._distutils.tests`; avoid explicitly collecting it
            # (it was not included in earlier setuptools releases).
            filter=lambda name: name != 'setuptools._distutils.tests',
        )

    # Check if `setuptools._vendor` exists. Some linux distributions opt to de-vendor `setuptools` and remove the
    # `setuptools._vendor` directory altogether. If this is the case, most of additional processing below should be
    # skipped to avoid errors and warnings about non-existent `setuptools._vendor` module.
    try:
        setuptools_vendor = importlib.import_module("setuptools._vendor")
    except ModuleNotFoundError:
        setuptools_vendor = None

    # Check for exposed packages/modules that are vendored by setuptools. If stand-alone version is not provided in the
    # environment, setuptools-vendored version is exposed (due to location of `setuptools._vendor` being appended to
    # `sys.path`). Applicable to v71.0.0 and later.
    vendored_status = dict()
    vendored_namespace_package_paths = dict()
    if version >= (71, 0) and setuptools_vendor is not None:
        VENDORED_TOP_LEVEL_NAMESPACE_CANDIDATES = (
            "backports",  # "regular" package, but has namespace semantics due to `pkgutil.extend_path()`
            "jaraco",  # PEP-420 namespace package
        )

        VENDORED_CANDIDATES = (
            "autocommand",
            "backports.tarfile",
            "importlib_metadata",
            "importlib_resources",
            "inflect",
            "jaraco.context",
            "jaraco.functools",
            "jaraco.text",
            "more_itertools",
            "ordered_set",
            "packaging",
            "platformdirs",
            "tomli",
            "typeguard",
            "typing_extensions",
            "wheel",
            "zipp",
        )

        # Resolve path(s) of `setuptools_vendor` package.
        setuptools_vendor_paths = [pathlib.Path(path).resolve() for path in setuptools_vendor.__path__]

        # Process each candidate: top-level namespace packages
        for candidate_name in VENDORED_TOP_LEVEL_NAMESPACE_CANDIDATES:
            try:
                candidate = importlib.import_module(candidate_name)
            except ImportError:
                # Candidate is neither installed stand-alone nor exposed from the vendor directory.
                continue

            # Retrieve the __path__ attribute and store it, so we can re-use it in hooks without having to re-import
            # `setuptools` and the candidate package...
            candidate_path_attr = getattr(candidate, '__path__', [])
            if candidate_path_attr:
                candidate_paths = [pathlib.Path(path).resolve() for path in candidate_path_attr]
                # For each path, check whether it is the vendor directory itself or lies beneath it.
                is_vendored = [
                    any([
                        setuptools_vendor_path in candidate_path.parents or candidate_path == setuptools_vendor_path
                        for setuptools_vendor_path in setuptools_vendor_paths
                    ]) for candidate_path in candidate_paths
                ]
                # For namespace packages, distinguish between "fully" vendored and "partially" vendored state; i.e.,
                # whether the part of namespace package in the vendored directory is the only part or not.
                if all(is_vendored):
                    vendored_status[candidate_name] = 'fully'
                elif any(is_vendored):
                    vendored_status[candidate_name] = 'partially'
                else:
                    vendored_status[candidate_name] = False

                # Store paths
                vendored_namespace_package_paths[candidate_name] = [str(path) for path in candidate_path_attr]

        # Process each candidate: modules and packages
        for candidate_name in VENDORED_CANDIDATES:
            try:
                candidate = importlib.import_module(candidate_name)
            except ImportError:
                continue

            # Check the __file__ attribute (modules and regular packages). Will not work with namespace packages, but
            # at the moment, there are none (vendored top-level namespace packages have already been handled).
            candidate_file_attr = getattr(candidate, '__file__', None)
            if candidate_file_attr is not None:
                candidate_path = pathlib.Path(candidate_file_attr).parent.resolve()
                is_vendored = any([
                    setuptools_vendor_path in candidate_path.parents or candidate_path == setuptools_vendor_path
                    for setuptools_vendor_path in setuptools_vendor_paths
                ])
                vendored_status[candidate_name] = is_vendored  # True/False

    # Collect submodules from `setuptools._vendor`, regardless of whether the vendored package is exposed or
    # not (because setuptools might need/use it either way).
    vendored_modules = []
    if setuptools_vendor is not None:
        EXCLUDED_VENDORED_MODULES = (
            # Prevent recursing into setuptools._vendor.pyparsing.diagram, which typically fails to be imported due
            # to missing dependencies (railroad, pyparsing (?), jinja2) and generates a warning... As the module is
            # usually unimportable, it is likely not to be used by setuptools. NOTE: pyparsing was removed from
            # vendored packages in setuptools v67.0.0; keep this exclude around for earlier versions.
            'setuptools._vendor.pyparsing.diagram',
            # Setuptools >= 71 started shipping vendored dependencies that include tests; avoid collecting those via
            # hidden imports. (Note that this also prevents creation of aliases for these modules, but that should
            # not be an issue, as they should not be referenced from anywhere).
            'setuptools._vendor.importlib_resources.tests',
            # These appear to be utility scripts bundled with the jaraco.text package - exclude them.
            'setuptools._vendor.jaraco.text.show-newlines',
            'setuptools._vendor.jaraco.text.strip-prefix',
            'setuptools._vendor.jaraco.text.to-dvorak',
            'setuptools._vendor.jaraco.text.to-qwerty',
        )
        vendored_modules += collect_submodules(
            'setuptools._vendor',
            filter=lambda name: name not in EXCLUDED_VENDORED_MODULES,
        )

        # `collect_submodules` (and its underlying `pkgutil.iter_modules`) do not discover namespace sub-packages, in
        # this case `setuptools._vendor.jaraco`. So force a manual scan of modules/packages inside it.
        vendored_modules += collect_submodules(
            'setuptools._vendor.jaraco',
            filter=lambda name: name not in EXCLUDED_VENDORED_MODULES,
        )

    # *** Data files for vendored packages ***
    vendored_data = []

    if version >= (71, 0) and setuptools_vendor is not None:
        # Since the vendored dependencies from `setuptools/_vendor` are now visible to the outside world, make
        # sure we collect their metadata. (We cannot use copy_metadata here, because we need to collect data
        # files to their original locations).
        vendored_data += collect_data_files('setuptools._vendor', includes=['**/*.dist-info'])
        # Similarly, ensure that `Lorem ipsum.txt` from vendored jaraco.text is collected
        vendored_data += collect_data_files('setuptools._vendor.jaraco.text', includes=['**/Lorem ipsum.txt'])

    # Return dictionary with collected information
    return {
        "available": True,
        "version": version,
        "distutils_vendored": distutils_vendored,
        "distutils_modules": distutils_modules,
        "vendored_status": vendored_status,
        "vendored_modules": vendored_modules,
        "vendored_data": vendored_data,
        "vendored_namespace_package_paths": vendored_namespace_package_paths,
    }
|
||||
|
||||
|
||||
class SetuptoolsInfo:
    """
    Lazily-initialized cache of setuptools properties (version, vendored distutils and vendored dependency
    information), retrieved via `_retrieve_setuptools_info` in an isolated subprocess on first attribute access.
    """
    def __init__(self):
        # All state is populated lazily by `_load_setuptools_info`, triggered through `__getattr__`.
        pass

    def __repr__(self):
        # Short fixed representation; used by the %s placeholders in this class's log messages.
        return "SetuptoolsInfo"

    # Delay initialization of setuptools information until the corresponding attributes are first requested.
    def __getattr__(self, name):
        # `__getattr__` is invoked only when normal attribute lookup fails. Once `_load_setuptools_info` has run,
        # `available` is present in the instance dict, so a miss at that point is a genuine missing attribute.
        if 'available' in self.__dict__:
            # Initialization was already done, but requested attribute is not available.
            raise AttributeError(name)

        # Load setuptools info...
        self._load_setuptools_info()
        # ... and return the requested attribute
        return getattr(self, name)

    def _load_setuptools_info(self):
        """Populate this object's attributes by querying setuptools in an isolated subprocess."""
        logger.info("%s: initializing cached setuptools info...", self)

        # Initialize variables so that they might be accessed even if setuptools is unavailable or if initialization
        # fails for some reason.
        self.available = False
        self.version = None
        self.distutils_vendored = False
        self.distutils_modules = []
        self.vendored_status = dict()
        self.vendored_modules = []
        self.vendored_data = []
        self.vendored_namespace_package_paths = dict()

        try:
            setuptools_info = _retrieve_setuptools_info()
        except Exception as e:
            logger.warning("%s: failed to obtain setuptools info: %s", self, e)
            return

        # If package could not be imported, `_retrieve_setuptools_info` returns None. In such cases, emit a debug
        # message instead of a warning, because this initialization might be triggered by a helper function that is
        # trying to determine availability of `setuptools` by inspecting the `available` attribute.
        if setuptools_info is None:
            logger.debug("%s: failed to obtain setuptools info: setuptools could not be imported.", self)
            return

        # Copy properties
        for key, value in setuptools_info.items():
            setattr(self, key, value)

    def is_vendored(self, module_name):
        """
        Return the vendored status of `module_name`: 'fully' / 'partially' (namespace packages), True, or False.
        Also returns False when setuptools is unavailable, because the status dictionary is empty in that case.
        """
        return self.vendored_status.get(module_name, False)

    @staticmethod
    def _create_vendored_aliases(vendored_name, module_name, modules_list):
        # Create aliases for all submodules: map every entry of `modules_list` that lives under `vendored_name`
        # to its unvendored alias under `module_name`. Returns a generator of (alias, vendored-name) pairs.
        prefix_len = len(vendored_name)  # Length of target-name prefix to remove
        return ((module_name + vendored_module[prefix_len:], vendored_module) for vendored_module in modules_list
                if vendored_module.startswith(vendored_name))

    def get_vendored_aliases(self, module_name):
        """Return (alias, vendored-name) pairs for `module_name` and its submodules under `setuptools._vendor`."""
        vendored_name = f"setuptools._vendor.{module_name}"
        return self._create_vendored_aliases(vendored_name, module_name, self.vendored_modules)

    def get_distutils_aliases(self):
        """Return (alias, vendored-name) pairs mapping `distutils` onto `setuptools._distutils`."""
        vendored_name = "setuptools._distutils"
        return self._create_vendored_aliases(vendored_name, "distutils", self.distutils_modules)
|
||||
|
||||
|
||||
# Module-level instance through which hooks access the lazily-initialized setuptools information.
setuptools_info = SetuptoolsInfo()
|
||||
|
||||
|
||||
def pre_safe_import_module_for_top_level_namespace_packages(api):
    """
    A common implementation of pre_safe_import_module hook function for handling vendored top-level namespace packages
    (i.e., `backports` and `jaraco`).

    This function can be either called from the `pre_safe_import_module` function in a pre-safe-import-module hook, or
    just imported into the hook and aliased to `pre_safe_import_module`.
    """
    module_name = api.module_name

    # Determine vendored status of the package/module. This is False both when the package is not a vendored copy
    # and when setuptools is unavailable (the vendored status dictionary is empty in the latter case).
    vendored = setuptools_info.is_vendored(module_name)
    if not vendored:
        return

    if vendored == 'fully':
        # Fully-vendored copy: force creation of aliases. On one hand, this aims to ensure that submodules are
        # resolvable; on the other, it prevents creation of the unvendored top-level package, which should not
        # exist in this case.
        vendored_name = f"setuptools._vendor.{module_name}"
        logger.info(
            "Setuptools: %r appears to be a full setuptools-vendored copy - creating alias to %r!", module_name,
            vendored_name
        )
        # Register an alias for the package and each of its (sub)modules.
        for alias, vendored_module in setuptools_info.get_vendored_aliases(module_name):
            api.add_alias_module(vendored_module, alias)
        return

    if vendored == 'partially':
        # Partially-vendored copy: adjust the submodule search paths so that submodules from all locations are
        # discoverable (especially from the setuptools vendor directory, which might not be in the search path yet).
        search_paths = setuptools_info.vendored_namespace_package_paths.get(module_name, [])
        logger.info(
            "Setuptools: %r appears to be a partial setuptools-vendored copy - extending search paths to %r!",
            module_name, search_paths
        )
        for search_path in search_paths:
            api.append_package_path(search_path)
        return

    # Any other truthy value in the status dictionary is unexpected.
    logger.warning("Setuptools: %r has unhandled vendored status: %r", module_name, vendored)
|
||||
|
||||
|
||||
def pre_safe_import_module(api):
    """
    A common implementation of pre_safe_import_module hook function.

    This function can be either called from the `pre_safe_import_module` function in a pre-safe-import-module hook, or
    just imported into the hook.
    """
    module_name = api.module_name

    # Bail out unless the package/module is a setuptools-vendored copy. `is_vendored` also returns False when
    # setuptools is unavailable, because the vendored status dictionary is empty in that case.
    if not setuptools_info.is_vendored(module_name):
        return

    vendored_name = f"setuptools._vendor.{module_name}"
    logger.info(
        "Setuptools: %r appears to be a setuptools-vendored copy - creating alias to %r!", module_name, vendored_name
    )

    # Register an alias for the package and each of its (sub)modules.
    for alias, vendored_module in setuptools_info.get_vendored_aliases(module_name):
        api.add_alias_module(vendored_module, alias)
|
||||
348
venv/lib/python3.12/site-packages/PyInstaller/utils/hooks/tcl_tk.py
Executable file
348
venv/lib/python3.12/site-packages/PyInstaller/utils/hooks/tcl_tk.py
Executable file
@@ -0,0 +1,348 @@
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (c) 2013-2023, PyInstaller Development Team.
|
||||
#
|
||||
# Distributed under the terms of the GNU General Public License (version 2
|
||||
# or later) with exception for distributing the bootloader.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#
|
||||
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
import os
|
||||
import fnmatch
|
||||
|
||||
from PyInstaller import compat
|
||||
from PyInstaller import isolated
|
||||
from PyInstaller import log as logging
|
||||
from PyInstaller.depend import bindepend
|
||||
|
||||
if compat.is_darwin:
|
||||
from PyInstaller.utils import osx as osxutils
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@isolated.decorate
def _get_tcl_tk_info():
    """
    Isolated-subprocess helper to retrieve the basic Tcl/Tk information:
    - tkinter_extension_file = the value of __file__ attribute of the _tkinter binary extension (path to file);
      None when _tkinter is a built-in rather than an extension module.
    - tcl_data_dir = path to the Tcl library/data directory.
    - tcl_version = Tcl version
    - tk_version = Tk version
    - tcl_threaded = boolean indicating whether Tcl/Tk is built with multi-threading support.

    Returns None when tkinter cannot be imported or the Tcl interpreter cannot be initialized.
    """
    try:
        import tkinter
        import _tkinter
    except ImportError:
        # tkinter unavailable
        return None
    # Creating the Tcl interpreter may fail even when the import succeeds, e.g., when the Tcl data files
    # are missing.
    try:
        tcl = tkinter.Tcl()
    except tkinter.TclError:  # e.g. "Can't find a usable init.tcl in the following directories: ..."
        return None

    # Query the location of Tcl library/data directory.
    tcl_data_dir = tcl.eval("info library")

    # Check if Tcl/Tk is built with multi-threaded support (built with --enable-threads), as indicated by the presence
    # of optional `threaded` member in `tcl_platform` array. Only the presence matters, not the value.
    try:
        tcl.getvar("tcl_platform(threaded)")  # Ignore the actual value.
        tcl_threaded = True
    except tkinter.TclError:
        tcl_threaded = False

    return {
        "available": True,
        # If `_tkinter` is a built-in (as opposed to an extension), it does not have a `__file__` attribute.
        "tkinter_extension_file": getattr(_tkinter, '__file__', None),
        "tcl_version": _tkinter.TCL_VERSION,
        "tk_version": _tkinter.TK_VERSION,
        "tcl_threaded": tcl_threaded,
        "tcl_data_dir": tcl_data_dir,
    }
|
||||
|
||||
|
||||
class TclTkInfo:
    """
    Lazily-initialized cache of Tcl/Tk information: versions, shared library locations, library/data directories,
    and the list of data files to collect. Attribute access triggers initialization on first use.
    """

    # Root directory names of Tcl and Tk library/data directories in the frozen application. These directories are
    # originally fully versioned (e.g., tcl8.6 and tk8.6); we want to remap them to unversioned variants, so that our
    # run-time hook (pyi_rthook__tkinter.py) does not have to determine version numbers when setting `TCL_LIBRARY`
    # and `TK_LIBRARY` environment variables.
    #
    # We also cannot use plain "tk" and "tcl", because on macOS, the Tcl and Tk shared libraries might come from
    # framework bundles, and would therefore end up being collected as "Tcl" and "Tk" in the top-level application
    # directory, causing clash due to filesystem being case-insensitive by default.
    TCL_ROOTNAME = '_tcl_data'
    TK_ROOTNAME = '_tk_data'
|
||||
|
||||
    def __init__(self):
        # All state is populated lazily by `_load_tcl_tk_info`, triggered through `__getattr__`.
        pass
|
||||
|
||||
    def __repr__(self):
        # Short fixed representation; used by the %s placeholders in this class's log messages.
        return "TclTkInfo"
|
||||
|
||||
    # Delay initialization of Tcl/Tk information until the corresponding attributes are first requested.
    def __getattr__(self, name):
        # `__getattr__` is invoked only when normal attribute lookup fails. Once `_load_tcl_tk_info` has run,
        # `available` is present in the instance dict, so a miss at that point is a genuine missing attribute.
        if 'available' in self.__dict__:
            # Initialization was already done, but requested attribute is not available.
            raise AttributeError(name)

        # Load Tcl/Tk info... (original comment said "Qt library info" - stale copy-paste from the Qt helper)
        self._load_tcl_tk_info()
        # ... and return the requested attribute
        return getattr(self, name)
|
||||
|
||||
    def _load_tcl_tk_info(self):
        """Query Tcl/Tk in an isolated subprocess and populate this object's attributes."""
        logger.info("%s: initializing cached Tcl/Tk info...", self)

        # Initialize variables so that they might be accessed even if tkinter/Tcl/Tk is unavailable or if
        # initialization fails for some reason.
        self.available = False
        self.tkinter_extension_file = None
        self.tcl_version = None
        self.tk_version = None
        self.tcl_threaded = False
        self.tcl_data_dir = None

        self.tk_data_dir = None
        self.tcl_module_dir = None

        self.is_macos_system_framework = False
        self.tcl_shared_library = None
        self.tk_shared_library = None

        self.data_files = []

        try:
            tcl_tk_info = _get_tcl_tk_info()
        except Exception as e:
            logger.warning("%s: failed to obtain Tcl/Tk info: %s", self, e)
            return

        # If tkinter could not be imported, `_get_tcl_tk_info` returns None. In such cases, emit a debug message
        # instead of a warning, because this initialization might be triggered by a helper function that is trying
        # to determine availability of `tkinter` by inspecting the `available` attribute.
        if tcl_tk_info is None:
            logger.debug("%s: failed to obtain Tcl/Tk info: tkinter/_tkinter could not be imported.", self)
            return

        # Copy properties
        for key, value in tcl_tk_info.items():
            setattr(self, key, value)

        # Parse Tcl/Tk version into (major, minor) tuple.
        self.tcl_version = tuple((int(x) for x in self.tcl_version.split(".")[:2]))
        self.tk_version = tuple((int(x) for x in self.tk_version.split(".")[:2]))

        # Determine full path to Tcl and Tk shared libraries against which the `_tkinter` extension module is linked.
        # This can only be done when `_tkinter` is in fact an extension, and not a built-in. In the latter case, the
        # Tcl/Tk libraries are statically linked into python shared library, so there are no shared libraries for us
        # to discover.
        if self.tkinter_extension_file:
            try:
                (
                    self.tcl_shared_library,
                    self.tk_shared_library,
                ) = self._find_tcl_tk_shared_libraries(self.tkinter_extension_file)
            except Exception:
                logger.warning("%s: failed to determine Tcl and Tk shared library location!", self, exc_info=True)

        # macOS: check if _tkinter is linked against system-provided Tcl.framework and Tk.framework. This is the
        # case with python3 from XCode tools (and was the case with very old homebrew python builds). In such cases,
        # we should not be collecting Tcl/Tk files.
        if compat.is_darwin:
            self.is_macos_system_framework = self._check_macos_system_framework(self.tcl_shared_library)

        # Emit a warning in the unlikely event that we are dealing with Teapot-distributed version of ActiveTcl.
        if not self.is_macos_system_framework:
            self._warn_if_using_activetcl_or_teapot(self.tcl_data_dir)

        # Infer location of Tk library/data directory. Ideally, we could infer this by running
        #
        #   import tkinter
        #   root = tkinter.Tk()
        #   tk_data_dir = root.tk.exprstring('$tk_library')
        #
        # in the isolated subprocess as part of `_get_tcl_tk_info`. However, that is impractical, as it shows the empty
        # window, and on some platforms (e.g., linux) requires display server. Therefore, try to guess the location,
        # based on the following heuristic:
        #  - if TK_LIBRARY is defined, use it.
        #  - if Tk is built as macOS framework bundle, look for Scripts sub-directory in Resources directory next to
        #    the shared library.
        #  - otherwise, look for: $tcl_root/../tkX.Y, where X and Y are Tk major and minor version.
        if "TK_LIBRARY" in os.environ:
            self.tk_data_dir = os.environ["TK_LIBRARY"]
        elif compat.is_darwin and self.tk_shared_library and (
            # is_framework_bundle_lib handles only fully-versioned framework library paths...
            (osxutils.is_framework_bundle_lib(self.tk_shared_library)) or
            # ... so manually handle top-level-symlinked variant for now.
            (self.tk_shared_library).endswith("Tk.framework/Tk")
        ):
            # Fully resolve the library path, in case it is a top-level symlink; for example, resolve
            #   /Library/Frameworks/Python.framework/Versions/3.13/Frameworks/Tk.framework/Tk
            # into
            #   /Library/Frameworks/Python.framework/Versions/3.13/Frameworks/Tk.framework/Versions/8.6/Tk
            tk_lib_realpath = os.path.realpath(self.tk_shared_library)
            # Resources/Scripts directory next to the shared library
            self.tk_data_dir = os.path.join(os.path.dirname(tk_lib_realpath), "Resources", "Scripts")
        else:
            self.tk_data_dir = os.path.join(
                os.path.dirname(self.tcl_data_dir),
                f"tk{self.tk_version[0]}.{self.tk_version[1]}",
            )

        # Infer location of Tcl module directory. The modules directory is separate from the library/data one, and
        # is located at $tcl_root/../tclX, where X is the major Tcl version.
        self.tcl_module_dir = os.path.join(
            os.path.dirname(self.tcl_data_dir),
            f"tcl{self.tcl_version[0]}",
        )

        # Find all data files
        if self.is_macos_system_framework:
            logger.info("%s: using macOS system Tcl/Tk framework - not collecting data files.", self)
        else:
            # Collect Tcl and Tk scripts from their corresponding library/data directories. See comment at the
            # definition of TCL_ROOTNAME and TK_ROOTNAME variables.
            if os.path.isdir(self.tcl_data_dir):
                self.data_files += self._collect_files_from_directory(
                    self.tcl_data_dir,
                    prefix=self.TCL_ROOTNAME,
                    excludes=['demos', '*.lib', 'tclConfig.sh'],
                )
            else:
                logger.warning("%s: Tcl library/data directory %r does not exist!", self, self.tcl_data_dir)

            if os.path.isdir(self.tk_data_dir):
                self.data_files += self._collect_files_from_directory(
                    self.tk_data_dir,
                    prefix=self.TK_ROOTNAME,
                    excludes=['demos', '*.lib', 'tkConfig.sh'],
                )
            else:
                logger.warning("%s: Tk library/data directory %r does not exist!", self, self.tk_data_dir)

            # Collect Tcl modules from modules directory
            if os.path.isdir(self.tcl_module_dir):
                self.data_files += self._collect_files_from_directory(
                    self.tcl_module_dir,
                    prefix=os.path.basename(self.tcl_module_dir),
                )
            else:
                logger.warning("%s: Tcl module directory %r does not exist!", self, self.tcl_module_dir)
|
||||
|
||||
@staticmethod
|
||||
def _collect_files_from_directory(root, prefix=None, excludes=None):
|
||||
"""
|
||||
A minimal port of PyInstaller.building.datastruct.Tree() functionality, which allows us to avoid using Tree
|
||||
here. This way, the TclTkInfo data structure can be used without having PyInstaller's config context set up.
|
||||
"""
|
||||
excludes = excludes or []
|
||||
|
||||
todo = [(root, prefix)]
|
||||
output = []
|
||||
while todo:
|
||||
target_dir, prefix = todo.pop()
|
||||
|
||||
for entry in os.listdir(target_dir):
|
||||
# Basic name-based exclusion
|
||||
if any((fnmatch.fnmatch(entry, exclude) for exclude in excludes)):
|
||||
continue
|
||||
|
||||
src_path = os.path.join(target_dir, entry)
|
||||
dest_path = os.path.join(prefix, entry) if prefix else entry
|
||||
|
||||
if os.path.isdir(src_path):
|
||||
todo.append((src_path, dest_path))
|
||||
else:
|
||||
# Return 3-element tuples with fully-resolved dest path, since other parts of code depend on that.
|
||||
output.append((dest_path, src_path, 'DATA'))
|
||||
|
||||
return output
|
||||
|
||||
@staticmethod
def _find_tcl_tk_shared_libraries(tkinter_ext_file):
    """
    Determine the Tcl and Tk shared libraries that the given _tkinter
    extension module is linked against. Returns a ``(tcl_lib, tk_lib)``
    tuple of fully-resolved paths; either element may be None if the
    corresponding library was not found among the binary's imports.
    """
    tcl_library = None
    tk_library = None

    # bindepend.get_imports() yields (name, fullpath) tuples.
    for _, resolved_path in bindepend.get_imports(tkinter_ext_file):
        # Skip unresolved entries.
        if resolved_path is None:
            continue

        # Compare against the basename of the resolved path; on macOS, the name
        # returned by get_imports is the referenced name, which is not
        # necessarily just a basename. Lower-case it for the comparisons.
        basename = os.path.basename(resolved_path).lower()

        if 'tcl' in basename:
            tcl_library = resolved_path
        elif 'tk' in basename:
            tk_library = resolved_path

    return tcl_library, tk_library
|
||||
|
||||
@staticmethod
|
||||
def _check_macos_system_framework(tcl_shared_lib):
|
||||
# Starting with macOS 11, system libraries are hidden (unless both Python and PyInstaller's bootloader are built
|
||||
# against macOS 11.x SDK). Therefore, Tcl shared library might end up unresolved (None); but that implicitly
|
||||
# indicates that the system framework is used.
|
||||
if tcl_shared_lib is None:
|
||||
return True
|
||||
|
||||
# Check if the path corresponds to the system framework, i.e., [/System]/Library/Frameworks/Tcl.framework/Tcl
|
||||
return 'Library/Frameworks/Tcl.framework' in tcl_shared_lib
|
||||
|
||||
@staticmethod
|
||||
def _warn_if_using_activetcl_or_teapot(tcl_root):
|
||||
"""
|
||||
Check if Tcl installation is a Teapot-distributed version of ActiveTcl, and log a non-fatal warning that the
|
||||
resulting frozen application will (likely) fail to run on other systems.
|
||||
|
||||
PyInstaller does *not* freeze all ActiveTcl dependencies -- including Teapot, which is typically ignorable.
|
||||
Since Teapot is *not* ignorable in this case, this function warns of impending failure.
|
||||
|
||||
See Also
|
||||
-------
|
||||
https://github.com/pyinstaller/pyinstaller/issues/621
|
||||
"""
|
||||
if tcl_root is None:
|
||||
return
|
||||
|
||||
# Read the "init.tcl" script and look for mentions of "activetcl" and "teapot"
|
||||
init_tcl = os.path.join(tcl_root, 'init.tcl')
|
||||
if not os.path.isfile(init_tcl):
|
||||
return
|
||||
|
||||
mentions_activetcl = False
|
||||
mentions_teapot = False
|
||||
|
||||
# Tcl/Tk reads files using the system encoding (https://www.tcl.tk/doc/howto/i18n.html#system_encoding);
|
||||
# on macOS, this is UTF-8.
|
||||
with open(init_tcl, 'r', encoding='utf8') as fp:
|
||||
for line in fp.readlines():
|
||||
line = line.strip().lower()
|
||||
if line.startswith('#'):
|
||||
continue
|
||||
if 'activetcl' in line:
|
||||
mentions_activetcl = True
|
||||
if 'teapot' in line:
|
||||
mentions_teapot = True
|
||||
if mentions_activetcl and mentions_teapot:
|
||||
break
|
||||
|
||||
if mentions_activetcl and mentions_teapot:
|
||||
logger.warning(
|
||||
"You appear to be using an ActiveTcl build of Tcl/Tk, which PyInstaller has\n"
|
||||
"difficulty freezing. To fix this, comment out all references to 'teapot' in\n"
|
||||
f"{init_tcl!r}\n"
|
||||
"See https://github.com/pyinstaller/pyinstaller/issues/621 for more information."
|
||||
)
|
||||
|
||||
|
||||
# Module-level TclTkInfo instance; all importers of this module share this single object.
tcltk_info = TclTkInfo()
|
||||
Reference in New Issue
Block a user