Remove hardcoded libpython binaries and add debug step
All checks were successful
build / build-linux (push) Successful in 16s
All checks were successful
build / build-linux (push) Successful in 16s
This commit is contained in:
1
venv/lib/python3.12/site-packages/PyInstaller/building/__init__.py
Executable file
1
venv/lib/python3.12/site-packages/PyInstaller/building/__init__.py
Executable file
@@ -0,0 +1 @@
|
||||
#
|
||||
1334
venv/lib/python3.12/site-packages/PyInstaller/building/api.py
Executable file
1334
venv/lib/python3.12/site-packages/PyInstaller/building/api.py
Executable file
File diff suppressed because it is too large
Load Diff
1272
venv/lib/python3.12/site-packages/PyInstaller/building/build_main.py
Executable file
1272
venv/lib/python3.12/site-packages/PyInstaller/building/build_main.py
Executable file
File diff suppressed because it is too large
Load Diff
459
venv/lib/python3.12/site-packages/PyInstaller/building/datastruct.py
Executable file
459
venv/lib/python3.12/site-packages/PyInstaller/building/datastruct.py
Executable file
@@ -0,0 +1,459 @@
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (c) 2005-2023, PyInstaller Development Team.
|
||||
#
|
||||
# Distributed under the terms of the GNU General Public License (version 2
|
||||
# or later) with exception for distributing the bootloader.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#
|
||||
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
import os
|
||||
import pathlib
|
||||
import warnings
|
||||
|
||||
from PyInstaller import log as logging
|
||||
from PyInstaller.building.utils import _check_guts_eq
|
||||
from PyInstaller.utils import misc
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def unique_name(entry):
    """
    Derive the key under which a TOC entry is considered unique.

    For file-based entry types (BINARY, DATA, EXTENSION, DEPENDENCY) the name is case-normalized via
    ``os.path.normcase``, so that names differing only in case collapse to the same key on case-insensitive
    filesystems.

    Parameters
    ----------
    entry : tuple
        A ``(name, path, typecode)`` TOC tuple.

    Returns
    -------
    str
        The (possibly case-normalized) entry name.
    """
    name, _, typecode = entry
    if typecode in {'BINARY', 'DATA', 'EXTENSION', 'DEPENDENCY'}:
        return os.path.normcase(name)
    return name
|
||||
|
||||
|
||||
# This class is deprecated and has been replaced by plain lists with explicit normalization (de-duplication) via
# `normalize_toc` and `normalize_pyz_toc` helper functions.
class TOC(list):
    """
    TOC (Table of Contents) class is a list of tuples of the form (name, path, typecode).

    typecode    name                   path                        description
    --------------------------------------------------------------------------------------
    EXTENSION   Python internal name.  Full path name in build.    Extension module.
    PYSOURCE    Python internal name.  Full path name in build.    Script.
    PYMODULE    Python internal name.  Full path name in build.    Pure Python module (including __init__ modules).
    PYZ         Runtime name.          Full path name in build.    A .pyz archive (ZlibArchive data structure).
    PKG         Runtime name.          Full path name in build.    A .pkg archive (Carchive data structure).
    BINARY      Runtime name.          Full path name in build.    Shared library.
    DATA        Runtime name.          Full path name in build.    Arbitrary files.
    OPTION      The option.            Unused.                     Python runtime option (frozen into executable).

    A TOC contains various types of files. A TOC contains no duplicates and preserves order.
    PyInstaller uses TOC data type to collect necessary files bundle them into an executable.
    """
    def __init__(self, initlist=None):
        super().__init__()

        # Deprecation warning
        warnings.warn(
            "TOC class is deprecated. Use a plain list of 3-element tuples instead.",
            DeprecationWarning,
            stacklevel=2,
        )

        # Set of `unique_name()` keys for entries currently in the list; used for O(1) duplicate detection.
        self.filenames = set()
        if initlist:
            for entry in initlist:
                self.append(entry)

    def append(self, entry):
        # Only tuples are valid TOC entries.
        if not isinstance(entry, tuple):
            logger.info("TOC found a %s, not a tuple", entry)
            raise TypeError("Expected tuple, not %s." % type(entry).__name__)

        unique = unique_name(entry)

        # Silently drop entries whose uniqueness key is already present.
        if unique not in self.filenames:
            self.filenames.add(unique)
            super().append(entry)

    def insert(self, pos, entry):
        if not isinstance(entry, tuple):
            logger.info("TOC found a %s, not a tuple", entry)
            raise TypeError("Expected tuple, not %s." % type(entry).__name__)
        unique = unique_name(entry)

        # Silently drop entries whose uniqueness key is already present.
        if unique not in self.filenames:
            self.filenames.add(unique)
            super().insert(pos, entry)

    def __add__(self, other):
        # self + other: duplicates from `other` are dropped by append() during extend().
        result = TOC(self)
        result.extend(other)
        return result

    def __radd__(self, other):
        # other + self: start from `other`'s entries, then add ours.
        result = TOC(other)
        result.extend(self)
        return result

    def __iadd__(self, other):
        for entry in other:
            self.append(entry)
        return self

    def extend(self, other):
        # TODO: look if this can be done more efficient with out the loop, e.g. by not using a list as base at all.
        for entry in other:
            self.append(entry)

    def __sub__(self, other):
        # Construct new TOC with entries not contained in the other TOC
        other = TOC(other)
        return TOC([entry for entry in self if unique_name(entry) not in other.filenames])

    def __rsub__(self, other):
        result = TOC(other)
        return result.__sub__(self)

    def __setitem__(self, key, value):
        if isinstance(key, slice):
            if key == slice(None, None, None):
                # special case: set the entire list
                self.filenames = set()
                self.clear()
                self.extend(value)
                return
            else:
                raise KeyError("TOC.__setitem__ doesn't handle slices")

        else:
            old_value = self[key]
            old_name = unique_name(old_value)
            self.filenames.remove(old_name)

            new_name = unique_name(value)
            if new_name not in self.filenames:
                self.filenames.add(new_name)
                super(TOC, self).__setitem__(key, value)
|
||||
|
||||
|
||||
class Target:
    # Per-class instance counter used to generate unique .toc cache-file names.
    invcnum = 0

    def __init__(self):
        from PyInstaller.config import CONF

        # Get a (per class) unique number to avoid conflicts between toc objects
        self.invcnum = self.__class__.invcnum
        self.__class__.invcnum += 1
        # Full path of the .toc file used to cache this target's "guts" between builds.
        self.tocfilename = os.path.join(CONF['workpath'], '%s-%02d.toc' % (self.__class__.__name__, self.invcnum))
        self.tocbasename = os.path.basename(self.tocfilename)
        self.dependencies = []

    def __postinit__(self):
        """
        Check if the target need to be rebuild and if so, re-assemble.

        `__postinit__` is to be called at the end of `__init__` of every subclass of Target. `__init__` is meant to
        setup the parameters and `__postinit__` is checking if rebuild is required and in case calls `assemble()`
        """
        logger.info("checking %s", self.__class__.__name__)
        data = None
        last_build = misc.mtime(self.tocfilename)
        if last_build == 0:
            # mtime of 0 means the cache file does not exist — first build.
            logger.info("Building %s because %s is non existent", self.__class__.__name__, self.tocbasename)
        else:
            try:
                data = misc.load_py_data_struct(self.tocfilename)
            except Exception:
                logger.info("Building because %s is bad", self.tocbasename)
            else:
                # create a dict for easier access
                data = dict(zip((g[0] for g in self._GUTS), data))
        # assemble if previous data was not found or is outdated
        if not data or self._check_guts(data, last_build):
            self.assemble()
            self._save_guts()

    # Sequence of (attribute_name, check_function) pairs describing the cached attributes; subclasses override it.
    # A check_function of None means the value is stored but not compared here.
    _GUTS = []

    def _check_guts(self, data, last_build):
        """
        Returns True if rebuild/assemble is required.
        """
        if len(data) != len(self._GUTS):
            # Cached data does not match the current _GUTS layout — treat as stale.
            logger.info("Building because %s is bad", self.tocbasename)
            return True
        for attr, func in self._GUTS:
            if func is None:
                # no check for this value
                continue
            if func(attr, data[attr], getattr(self, attr), last_build):
                return True
        return False

    def _save_guts(self):
        """
        Save the input parameters and the work-product of this run to maybe avoid regenerating it later.
        """
        data = tuple(getattr(self, g[0]) for g in self._GUTS)
        misc.save_py_data_struct(self.tocfilename, data)
|
||||
|
||||
|
||||
class Tree(Target, list):
    """
    This class is a way of creating a TOC (Table of Contents) list that describes some or all of the files within a
    directory.
    """
    def __init__(self, root=None, prefix=None, excludes=None, typecode='DATA'):
        """
        root
            The root of the tree (on the build system).
        prefix
            Optional prefix to the names of the target system.
        excludes
            A list of names to exclude. Two forms are allowed:

            name
                Files with this basename will be excluded (do not include the path).
            *.ext
                Any file with the given extension will be excluded.
        typecode
            The typecode to be used for all files found in this tree. See the TOC class for for information about
            the typcodes.
        """
        Target.__init__(self)
        list.__init__(self)
        self.root = root
        self.prefix = prefix
        self.excludes = excludes
        self.typecode = typecode
        if excludes is None:
            self.excludes = []
        self.__postinit__()

    _GUTS = (  # input parameters
        ('root', _check_guts_eq),
        ('prefix', _check_guts_eq),
        ('excludes', _check_guts_eq),
        ('typecode', _check_guts_eq),
        ('data', None),  # tested below
        # no calculated/analysed values
    )

    def _check_guts(self, data, last_build):
        if Target._check_guts(self, data, last_build):
            return True
        # Walk the collected directories as check if they have been changed - which means files have been added or
        # removed. There is no need to check for the files, since `Tree` is only about the directory contents (which is
        # the list of files).
        stack = [data['root']]
        while stack:
            d = stack.pop()
            if misc.mtime(d) > last_build:
                logger.info("Building %s because directory %s changed", self.tocbasename, d)
                return True
            for nm in os.listdir(d):
                path = os.path.join(d, nm)
                if os.path.isdir(path):
                    stack.append(path)
        self[:] = data['data']  # collected files
        return False

    def _save_guts(self):
        # Use the attribute `data` to save the list
        self.data = self
        super()._save_guts()
        del self.data

    def assemble(self):
        logger.info("Building Tree %s", self.tocbasename)
        stack = [(self.root, self.prefix)]
        excludes = set()
        xexcludes = set()
        # Split excludes into basename excludes and extension ('*.ext') excludes.
        for name in self.excludes:
            if name.startswith('*'):
                # Store without the leading '*', e.g. '.pyc'.
                xexcludes.add(name[1:])
            else:
                excludes.add(name)
        result = []
        # Iterative depth-first walk of the directory tree.
        while stack:
            dir, prefix = stack.pop()
            for filename in os.listdir(dir):
                if filename in excludes:
                    continue
                ext = os.path.splitext(filename)[1]
                if ext in xexcludes:
                    continue
                fullfilename = os.path.join(dir, filename)
                if prefix:
                    resfilename = os.path.join(prefix, filename)
                else:
                    resfilename = filename
                if os.path.isdir(fullfilename):
                    # Recurse into subdirectory (iteratively, via the stack).
                    stack.append((fullfilename, resfilename))
                else:
                    result.append((resfilename, fullfilename, self.typecode))
        self[:] = result
|
||||
|
||||
|
||||
def normalize_toc(toc):
    """
    Normalize (de-duplicate by destination name) a regular TOC, delegating to `_normalize_toc`.
    """
    # Entry-type priorities; any typecode not listed here defaults to 0.
    priorities = {
        # DEPENDENCY entries need to replace original entries, so they need the highest priority.
        'DEPENDENCY': 3,
        # SYMLINK entries have higher priority than other regular entries
        'SYMLINK': 2,
        # BINARY/EXTENSION entries undergo additional processing, so give them precedence over DATA and other entries.
        'BINARY': 1,
        'EXTENSION': 1,
    }

    def _should_case_normalize(typecode):
        # Case-normalize all entries except OPTION.
        return typecode != "OPTION"

    return _normalize_toc(toc, priorities, _should_case_normalize)
|
||||
|
||||
|
||||
def normalize_pyz_toc(toc):
    """
    Normalize (de-duplicate) a PYZ TOC, delegating to `_normalize_toc`.
    """
    # Entries with a higher optimization level take precedence; unlisted typecodes default to 0.
    priorities = {
        'PYMODULE-2': 2,
        'PYMODULE-1': 1,
        'PYMODULE': 0,
    }
    return _normalize_toc(toc, priorities)
|
||||
|
||||
|
||||
def _normalize_toc(toc, toc_type_priorities, type_case_normalization_fcn=lambda typecode: False):
|
||||
options_toc = []
|
||||
tmp_toc = dict()
|
||||
for dest_name, src_name, typecode in toc:
|
||||
# Exempt OPTION entries from de-duplication processing. Some options might allow being specified multiple times.
|
||||
if typecode == 'OPTION':
|
||||
options_toc.append(((dest_name, src_name, typecode)))
|
||||
continue
|
||||
|
||||
# Always sanitize the dest_name with `os.path.normpath` to remove any local loops with parent directory path
|
||||
# components. `pathlib` does not seem to offer equivalent functionality.
|
||||
dest_name = os.path.normpath(dest_name)
|
||||
|
||||
# Normalize the destination name for uniqueness. Use `pathlib.PurePath` to ensure that keys are both
|
||||
# case-normalized (on OSes where applicable) and directory-separator normalized (just in case).
|
||||
if type_case_normalization_fcn(typecode):
|
||||
entry_key = pathlib.PurePath(dest_name)
|
||||
else:
|
||||
entry_key = dest_name
|
||||
|
||||
existing_entry = tmp_toc.get(entry_key)
|
||||
if existing_entry is None:
|
||||
# Entry does not exist - insert
|
||||
tmp_toc[entry_key] = (dest_name, src_name, typecode)
|
||||
else:
|
||||
# Entry already exists - replace if its typecode has higher priority
|
||||
_, _, existing_typecode = existing_entry
|
||||
if toc_type_priorities.get(typecode, 0) > toc_type_priorities.get(existing_typecode, 0):
|
||||
tmp_toc[entry_key] = (dest_name, src_name, typecode)
|
||||
|
||||
# Return the items as list. The order matches the original order due to python dict maintaining the insertion order.
|
||||
# The exception are OPTION entries, which are now placed at the beginning of the TOC.
|
||||
return options_toc + list(tmp_toc.values())
|
||||
|
||||
|
||||
def toc_process_symbolic_links(toc):
    """
    Process TOC entries and replace entries whose files are symbolic links with SYMLINK entries (provided original file
    is also being collected).
    """
    # All destination names, for fast membership checks when validating link targets.
    all_dest_files = {dest_name for dest_name, src_name, typecode in toc}

    # Rebuild the TOC, converting eligible entries into SYMLINK entries.
    new_toc = []
    for entry in toc:
        dest_name, src_name, typecode = entry

        if typecode == 'SYMLINK':
            # Already a symbolic-link entry; keep as-is.
            new_toc.append(entry)
            continue

        if not src_name:
            # No valid source name (e.g., OPTION entries); keep as-is.
            new_toc.append(entry)
            continue

        if not os.path.islink(src_name):
            # Source is a regular file or directory; keep as-is.
            new_toc.append(entry)
            continue

        # Try preserving the symbolic link, under strict relative-relationship-preservation check; fall back to the
        # original entry when preservation is not possible.
        symlink_entry = _try_preserving_symbolic_link(dest_name, src_name, all_dest_files)
        new_toc.append(symlink_entry if symlink_entry else entry)

    return new_toc
|
||||
|
||||
|
||||
def _try_preserving_symbolic_link(dest_name, src_name, all_dest_files):
    """
    Try to turn the (dest_name, src_name) pair into a SYMLINK TOC entry, provided the link's target is itself being
    collected under a destination name that preserves the link/target relative relationship.

    Returns a ``(dest_name, relative_link, 'SYMLINK')`` tuple on success, or None when the link cannot be preserved
    (absolute link target, cyclic links, or target not collected).
    """
    seen_src_files = set()

    # Set initial values for the loop
    ref_src_file = src_name
    ref_dest_file = dest_name

    while True:
        # Guard against cyclic links...
        if ref_src_file in seen_src_files:
            break
        seen_src_files.add(ref_src_file)

        # Stop when referenced source file is not a symbolic link anymore.
        if not os.path.islink(ref_src_file):
            break

        # Read the symbolic link's target, but do not fully resolve it using os.path.realpath(), because there might be
        # other symbolic links involved as well (for example, /lib64 -> /usr/lib64 whereas we are processing
        # /lib64/liba.so -> /lib64/liba.so.1)
        symlink_target = os.readlink(ref_src_file)
        if os.path.isabs(symlink_target):
            break  # We support only relative symbolic links.

        # Follow the link one step, in parallel, in both destination and source namespaces.
        ref_dest_file = os.path.join(os.path.dirname(ref_dest_file), symlink_target)
        ref_dest_file = os.path.normpath(ref_dest_file)  # remove any '..'

        ref_src_file = os.path.join(os.path.dirname(ref_src_file), symlink_target)
        ref_src_file = os.path.normpath(ref_src_file)  # remove any '..'

        # Check if referenced destination file is valid (i.e., we are collecting a file under referenced name).
        if ref_dest_file in all_dest_files:
            # Sanity check: original source name and current referenced source name must, after complete resolution,
            # point to the same file.
            if os.path.realpath(src_name) == os.path.realpath(ref_src_file):
                # Compute relative link for the destination file (might be modified, if we went over non-collected
                # intermediate links).
                rel_link = os.path.relpath(ref_dest_file, os.path.dirname(dest_name))
                return dest_name, rel_link, 'SYMLINK'

        # If referenced destination is not valid, do another iteration in case we are dealing with chained links and we
        # are not collecting an intermediate link...

    return None
|
||||
90
venv/lib/python3.12/site-packages/PyInstaller/building/icon.py
Executable file
90
venv/lib/python3.12/site-packages/PyInstaller/building/icon.py
Executable file
@@ -0,0 +1,90 @@
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (c) 2022-2023, PyInstaller Development Team.
|
||||
#
|
||||
# Distributed under the terms of the GNU General Public License (version 2
|
||||
# or later) with exception for distributing the bootloader.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#
|
||||
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
from typing import Tuple
|
||||
|
||||
import os
|
||||
import hashlib
|
||||
|
||||
|
||||
def normalize_icon_type(icon_path: str, allowed_types: Tuple[str], convert_type: str, workpath: str) -> str:
    """
    Returns a valid icon path or raises an Exception on error.
    Ensures that the icon exists, and, if necessary, attempts to convert it to correct OS-specific format using Pillow.

    Takes:
    icon_path - the icon given by the user
    allowed_types - a tuple of icon formats that should be allowed through
        EX: ("ico", "exe")
    convert_type - the type to attempt conversion too if necessary
        EX: "icns"
    workpath - the temp directory to save any newly generated image files
    """

    # explicitly error if file not found
    if not os.path.exists(icon_path):
        raise FileNotFoundError(f"Icon input file {icon_path} not found")

    _, extension = os.path.splitext(icon_path)
    extension = extension[1:]  # get rid of the "." in ".whatever"

    # if the file is already in the right format, pass it back unchanged
    if extension in allowed_types:
        # Check both the suffix and the header of the file to guard against the user confusing image types.
        signatures = hex_signatures[extension]
        with open(icon_path, "rb") as f:
            # Read just enough bytes to compare against the longest known signature.
            header = f.read(max(len(s) for s in signatures))
        if any(list(header)[:len(s)] == s for s in signatures):
            return icon_path

    # The icon type is wrong! Let's try and import PIL
    try:
        from PIL import Image as PILImage
        import PIL

    except ImportError:
        raise ValueError(
            f"Received icon image '{icon_path}' which exists but is not in the correct format. On this platform, "
            f"only {allowed_types} images may be used as icons. If Pillow is installed, automatic conversion will "
            f"be attempted. Please install Pillow or convert your '{extension}' file to one of {allowed_types} "
            f"and try again."
        )

    # Let's try to use PIL to convert the icon file type
    try:
        # Hash the source path into the generated name so distinct source icons do not clobber each other in workpath.
        _generated_name = f"generated-{hashlib.sha256(icon_path.encode()).hexdigest()}.{convert_type}"
        generated_icon = os.path.join(workpath, _generated_name)
        with PILImage.open(icon_path) as im:
            # If an image uses a custom palette + transparency, convert it to RGBA for a better alpha mask depth.
            if im.mode == "P" and im.info.get("transparency", None) is not None:
                # The bit depth of the alpha channel will be higher, and the images will look better when eventually
                # scaled to multiple sizes (16,24,32,..) for the ICO format for example.
                im = im.convert("RGBA")
            im.save(generated_icon)
        icon_path = generated_icon
    except PIL.UnidentifiedImageError:
        raise ValueError(
            f"Something went wrong converting icon image '{icon_path}' to '.{convert_type}' with Pillow, "
            f"perhaps the image format is unsupported. Try again with a different file or use a file that can "
            f"be used without conversion on this platform: {allowed_types}"
        )

    return icon_path
|
||||
|
||||
|
||||
# Possible initial bytes of icon types PyInstaller needs to be able to recognise.
# Taken from: https://en.wikipedia.org/wiki/List_of_file_signatures
# Each value is a list of alternative signatures (as lists of byte values) for that format.
hex_signatures = {
    "png": [[0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A]],
    "exe": [[0x4D, 0x5A], [0x5A, 0x4D]],
    "ico": [[0x00, 0x00, 0x01, 0x00]],
    "icns": [[0x69, 0x63, 0x6e, 0x73]],
}
|
||||
909
venv/lib/python3.12/site-packages/PyInstaller/building/makespec.py
Executable file
909
venv/lib/python3.12/site-packages/PyInstaller/building/makespec.py
Executable file
@@ -0,0 +1,909 @@
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (c) 2005-2023, PyInstaller Development Team.
|
||||
#
|
||||
# Distributed under the terms of the GNU General Public License (version 2
|
||||
# or later) with exception for distributing the bootloader.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#
|
||||
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
|
||||
#-----------------------------------------------------------------------------
|
||||
"""
|
||||
Automatically build spec files containing a description of the project.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
from PyInstaller import DEFAULT_SPECPATH, HOMEPATH
|
||||
from PyInstaller import log as logging
|
||||
from PyInstaller.building.templates import bundleexetmplt, bundletmplt, onedirtmplt, onefiletmplt, splashtmpl
|
||||
from PyInstaller.compat import is_darwin, is_win
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# This list gives valid choices for the ``--debug`` command-line option, except for the ``all`` choice.
|
||||
DEBUG_ARGUMENT_CHOICES = ['imports', 'bootloader', 'noarchive']
|
||||
# This is the ``all`` choice.
|
||||
DEBUG_ALL_CHOICE = ['all']
|
||||
|
||||
|
||||
def escape_win_filepath(path):
    """
    Normalize *path* and double every backslash, so the result can be embedded as a string literal in a generated
    spec file.
    """
    normalized = os.path.normpath(path)
    return normalized.replace('\\', '\\\\')
|
||||
|
||||
|
||||
def make_path_spec_relative(filename, spec_dir):
    """
    Make the filename relative to the directory containing the .spec file if filename is relative and not absolute.
    Otherwise keep filename untouched.
    """
    if os.path.isabs(filename):
        return filename
    # Anchor the relative name to the current directory, then re-express it relative to the spec directory.
    return os.path.relpath(os.path.abspath(filename), start=spec_dir)
|
||||
|
||||
|
||||
# Support for trying to avoid hard-coded paths in the .spec files. Eg, all files rooted in the Installer directory tree
|
||||
# will be written using "HOMEPATH", thus allowing this spec file to be used with any Installer installation. Same thing
|
||||
# could be done for other paths too.
|
||||
path_conversions = ((HOMEPATH, "HOMEPATH"),)
|
||||
|
||||
|
||||
class SourceDestAction(argparse.Action):
    """
    A command line option which takes multiple source:dest pairs.
    """
    def __init__(self, *args, default=None, metavar=None, **kwargs):
        # Force our own default (a fresh list) and metavar, regardless of what the caller supplied.
        super().__init__(*args, default=[], metavar='SOURCE:DEST', **kwargs)

    def __call__(self, parser, namespace, value, option_string=None):
        # Find the only separator that isn't a Windows drive.
        candidates = [m for m in re.finditer(rf"(^\w:[/\\])|[:{os.pathsep}]", value) if not m[1]]
        if len(candidates) != 1:
            # Split into SRC and DEST failed, wrong syntax
            raise argparse.ArgumentError(self, f'Wrong syntax, should be {self.option_strings[0]}=SOURCE:DEST')
        separator = candidates[0]
        src = value[:separator.start()]
        dest = value[separator.end():]
        if not src or not dest:
            # Syntax was correct, but one or both of SRC and DEST was not given
            raise argparse.ArgumentError(self, "You have to specify both SOURCE and DEST")

        # argparse is not particularly smart with copy by reference typed defaults. If the current list is the default,
        # replace it before modifying it to avoid changing the default.
        if getattr(namespace, self.dest) is self.default:
            setattr(namespace, self.dest, [])
        getattr(namespace, self.dest).append((src, dest))
|
||||
|
||||
|
||||
def make_variable_path(filename, conversions=path_conversions):
    """
    Try to split an absolute *filename* into a (variable_name, remainder) pair using the given conversion roots,
    so spec files can reference e.g. HOMEPATH instead of a hard-coded absolute path.

    Returns (None, filename) when no conversion root is a prefix of the filename.
    """
    if not os.path.isabs(filename):
        # os.path.commonpath can not compare relative and absolute paths, and if filename is not absolute, none of the
        # paths in conversions will match anyway.
        return None, filename
    for from_path, to_name in conversions:
        assert os.path.abspath(from_path) == from_path, ("path '%s' should already be absolute" % from_path)
        try:
            common_path = os.path.commonpath([filename, from_path])
        except ValueError:
            # Per https://docs.python.org/3/library/os.path.html#os.path.commonpath, this raises ValueError in several
            # cases which prevent computing a common path.
            common_path = None
        if common_path != from_path:
            continue
        # The conversion root is a prefix; strip it (and a leading separator, if any) from the filename.
        remainder = filename[len(from_path):]
        if remainder.startswith(('\\', '/')):
            remainder = remainder[1:]
        return to_name, remainder
    return None, filename
|
||||
|
||||
|
||||
def removed_key_option(x):
    # Argparse `type=` callback for the removed --key option; unconditionally raises with a migration hint.
    from PyInstaller.exceptions import RemovedCipherFeatureError
    raise RemovedCipherFeatureError("Please remove your --key=xxx argument.")
|
||||
|
||||
|
||||
class _RemovedFlagAction(argparse.Action):
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs["help"] = argparse.SUPPRESS
|
||||
kwargs["nargs"] = 0
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
|
||||
class _RemovedNoEmbedManifestAction(_RemovedFlagAction):
    # Handler for the removed --no-embed-manifest flag; raises with a migration hint when used.
    def __call__(self, *args, **kwargs):
        from PyInstaller.exceptions import RemovedExternalManifestError
        raise RemovedExternalManifestError("Please remove your --no-embed-manifest argument.")
|
||||
|
||||
|
||||
class _RemovedWinPrivateAssembliesAction(_RemovedFlagAction):
    # Handler for the removed --win-private-assemblies flag; raises with a migration hint when used.
    def __call__(self, *args, **kwargs):
        from PyInstaller.exceptions import RemovedWinSideBySideSupportError
        raise RemovedWinSideBySideSupportError("Please remove your --win-private-assemblies argument.")
|
||||
|
||||
|
||||
class _RemovedWinNoPreferRedirectsAction(_RemovedFlagAction):
    # Handler for the removed --win-no-prefer-redirects flag; raises with a migration hint when used.
    def __call__(self, *args, **kwargs):
        from PyInstaller.exceptions import RemovedWinSideBySideSupportError
        raise RemovedWinSideBySideSupportError("Please remove your --win-no-prefer-redirects argument.")
|
||||
|
||||
|
||||
# An object used in place of a "path string", which knows how to repr() itself using variable names instead of
# hard-coded paths.
class Path:
    def __init__(self, *parts):
        self.path = os.path.join(*parts)
        # The variable-prefix/suffix split is computed lazily on first repr() and cached on the instance.
        self.variable_prefix = None
        self.filename_suffix = None

    def __repr__(self):
        if self.filename_suffix is None:
            # Lazily resolve which conversion variable (if any) prefixes this path.
            self.variable_prefix, self.filename_suffix = make_variable_path(self.path)
        if self.variable_prefix is None:
            return repr(self.path)
        return f"os.path.join({self.variable_prefix},{self.filename_suffix!r})"
|
||||
|
||||
|
||||
# An object used to construct extra preamble for the spec file, in order to accommodate extra collect_*() calls from the
# command-line
class Preamble:
    def __init__(
        self, datas, binaries, hiddenimports, collect_data, collect_binaries, collect_submodules, collect_all,
        copy_metadata, recursive_copy_metadata
    ):
        # Initialize with literal values - will be switched to preamble variable name later, if necessary
        self.binaries = binaries or []
        self.hiddenimports = hiddenimports or []
        self.datas = datas or []
        # Preamble content, accumulated as a list of source lines; joined into one string at the end.
        self.content = []

        # Import statements
        if collect_data:
            self._add_hookutil_import('collect_data_files')
        if collect_binaries:
            self._add_hookutil_import('collect_dynamic_libs')
        if collect_submodules:
            self._add_hookutil_import('collect_submodules')
        if collect_all:
            self._add_hookutil_import('collect_all')
        if copy_metadata or recursive_copy_metadata:
            self._add_hookutil_import('copy_metadata')
        if self.content:
            self.content += ['']  # empty line to separate the section
        # Variables
        if collect_data or copy_metadata or collect_all or recursive_copy_metadata:
            self._add_var('datas', self.datas)
            self.datas = 'datas'  # switch to variable
        if collect_binaries or collect_all:
            self._add_var('binaries', self.binaries)
            self.binaries = 'binaries'  # switch to variable
        if collect_submodules or collect_all:
            self._add_var('hiddenimports', self.hiddenimports)
            self.hiddenimports = 'hiddenimports'  # switch to variable
        # Content - collect_data_files
        for entry in collect_data:
            self._add_collect_data(entry)
        # Content - copy_metadata
        for entry in copy_metadata:
            self._add_copy_metadata(entry)
        # Content - copy_metadata(..., recursive=True)
        for entry in recursive_copy_metadata:
            self._add_recursive_copy_metadata(entry)
        # Content - collect_binaries
        for entry in collect_binaries:
            self._add_collect_binaries(entry)
        # Content - collect_submodules
        for entry in collect_submodules:
            self._add_collect_submodules(entry)
        # Content - collect_all
        for entry in collect_all:
            self._add_collect_all(entry)
        # Merge
        if self.content and self.content[-1] != '':
            self.content += ['']  # empty line
        self.content = '\n'.join(self.content)

    def _add_hookutil_import(self, name):
        # Emit an import line for the given PyInstaller.utils.hooks helper.
        self.content += ['from PyInstaller.utils.hooks import {0}'.format(name)]

    def _add_var(self, name, initial_value):
        # Emit a variable assignment line with the literal initial value.
        self.content += ['{0} = {1}'.format(name, initial_value)]

    def _add_collect_data(self, name):
        # Emit a collect_data_files() call that extends `datas`.
        self.content += ['datas += collect_data_files(\'{0}\')'.format(name)]

    def _add_copy_metadata(self, name):
        # Emit a copy_metadata() call that extends `datas`.
        self.content += ['datas += copy_metadata(\'{0}\')'.format(name)]

    def _add_recursive_copy_metadata(self, name):
        # Emit a recursive copy_metadata() call that extends `datas`.
        self.content += ['datas += copy_metadata(\'{0}\', recursive=True)'.format(name)]

    def _add_collect_binaries(self, name):
        # Emit a collect_dynamic_libs() call that extends `binaries`.
        self.content += ['binaries += collect_dynamic_libs(\'{0}\')'.format(name)]

    def _add_collect_submodules(self, name):
        # Emit a collect_submodules() call that extends `hiddenimports`.
        self.content += ['hiddenimports += collect_submodules(\'{0}\')'.format(name)]

    def _add_collect_all(self, name):
        # Emit a collect_all() call that extends `datas`, `binaries`, and `hiddenimports`.
        self.content += [
            'tmp_ret = collect_all(\'{0}\')'.format(name),
            'datas += tmp_ret[0]; binaries += tmp_ret[1]; hiddenimports += tmp_ret[2]'
        ]
|
||||
|
||||
|
||||
def __add_options(parser):
|
||||
"""
|
||||
Add the `Makespec` options to a option-parser instance or a option group.
|
||||
"""
|
||||
g = parser.add_argument_group('What to generate')
|
||||
g.add_argument(
|
||||
"-D",
|
||||
"--onedir",
|
||||
dest="onefile",
|
||||
action="store_false",
|
||||
default=None,
|
||||
help="Create a one-folder bundle containing an executable (default)",
|
||||
)
|
||||
g.add_argument(
|
||||
"-F",
|
||||
"--onefile",
|
||||
dest="onefile",
|
||||
action="store_true",
|
||||
default=None,
|
||||
help="Create a one-file bundled executable.",
|
||||
)
|
||||
g.add_argument(
|
||||
"--specpath",
|
||||
metavar="DIR",
|
||||
help="Folder to store the generated spec file (default: current directory)",
|
||||
)
|
||||
g.add_argument(
|
||||
"-n",
|
||||
"--name",
|
||||
help="Name to assign to the bundled app and spec file (default: first script's basename)",
|
||||
)
|
||||
g.add_argument(
|
||||
"--contents-directory",
|
||||
help="For onedir builds only, specify the name of the directory in which all supporting files (i.e. everything "
|
||||
"except the executable itself) will be placed in. Use \".\" to re-enable old onedir layout without contents "
|
||||
"directory.",
|
||||
)
|
||||
|
||||
g = parser.add_argument_group('What to bundle, where to search')
|
||||
g.add_argument(
|
||||
'--add-data',
|
||||
action=SourceDestAction,
|
||||
dest='datas',
|
||||
help="Additional data files or directories containing data files to be added to the application. The argument "
|
||||
'value should be in form of "source:dest_dir", where source is the path to file (or directory) to be '
|
||||
"collected, dest_dir is the destination directory relative to the top-level application directory, and both "
|
||||
"paths are separated by a colon (:). To put a file in the top-level application directory, use . as a "
|
||||
"dest_dir. This option can be used multiple times."
|
||||
)
|
||||
g.add_argument(
|
||||
'--add-binary',
|
||||
action=SourceDestAction,
|
||||
dest="binaries",
|
||||
help='Additional binary files to be added to the executable. See the ``--add-data`` option for the format. '
|
||||
'This option can be used multiple times.',
|
||||
)
|
||||
g.add_argument(
|
||||
"-p",
|
||||
"--paths",
|
||||
dest="pathex",
|
||||
metavar="DIR",
|
||||
action="append",
|
||||
default=[],
|
||||
help="A path to search for imports (like using PYTHONPATH). Multiple paths are allowed, separated by ``%s``, "
|
||||
"or use this option multiple times. Equivalent to supplying the ``pathex`` argument in the spec file." %
|
||||
repr(os.pathsep),
|
||||
)
|
||||
g.add_argument(
|
||||
'--hidden-import',
|
||||
'--hiddenimport',
|
||||
action='append',
|
||||
default=[],
|
||||
metavar="MODULENAME",
|
||||
dest='hiddenimports',
|
||||
help='Name an import not visible in the code of the script(s). This option can be used multiple times.',
|
||||
)
|
||||
g.add_argument(
|
||||
'--collect-submodules',
|
||||
action="append",
|
||||
default=[],
|
||||
metavar="MODULENAME",
|
||||
dest='collect_submodules',
|
||||
help='Collect all submodules from the specified package or module. This option can be used multiple times.',
|
||||
)
|
||||
g.add_argument(
|
||||
'--collect-data',
|
||||
'--collect-datas',
|
||||
action="append",
|
||||
default=[],
|
||||
metavar="MODULENAME",
|
||||
dest='collect_data',
|
||||
help='Collect all data from the specified package or module. This option can be used multiple times.',
|
||||
)
|
||||
g.add_argument(
|
||||
'--collect-binaries',
|
||||
action="append",
|
||||
default=[],
|
||||
metavar="MODULENAME",
|
||||
dest='collect_binaries',
|
||||
help='Collect all binaries from the specified package or module. This option can be used multiple times.',
|
||||
)
|
||||
g.add_argument(
|
||||
'--collect-all',
|
||||
action="append",
|
||||
default=[],
|
||||
metavar="MODULENAME",
|
||||
dest='collect_all',
|
||||
help='Collect all submodules, data files, and binaries from the specified package or module. This option can '
|
||||
'be used multiple times.',
|
||||
)
|
||||
g.add_argument(
|
||||
'--copy-metadata',
|
||||
action="append",
|
||||
default=[],
|
||||
metavar="PACKAGENAME",
|
||||
dest='copy_metadata',
|
||||
help='Copy metadata for the specified package. This option can be used multiple times.',
|
||||
)
|
||||
g.add_argument(
|
||||
'--recursive-copy-metadata',
|
||||
action="append",
|
||||
default=[],
|
||||
metavar="PACKAGENAME",
|
||||
dest='recursive_copy_metadata',
|
||||
help='Copy metadata for the specified package and all its dependencies. This option can be used multiple '
|
||||
'times.',
|
||||
)
|
||||
g.add_argument(
|
||||
"--additional-hooks-dir",
|
||||
action="append",
|
||||
dest="hookspath",
|
||||
default=[],
|
||||
help="An additional path to search for hooks. This option can be used multiple times.",
|
||||
)
|
||||
g.add_argument(
|
||||
'--runtime-hook',
|
||||
action='append',
|
||||
dest='runtime_hooks',
|
||||
default=[],
|
||||
help='Path to a custom runtime hook file. A runtime hook is code that is bundled with the executable and is '
|
||||
'executed before any other code or module to set up special features of the runtime environment. This option '
|
||||
'can be used multiple times.',
|
||||
)
|
||||
g.add_argument(
|
||||
'--exclude-module',
|
||||
dest='excludes',
|
||||
action='append',
|
||||
default=[],
|
||||
help='Optional module or package (the Python name, not the path name) that will be ignored (as though it was '
|
||||
'not found). This option can be used multiple times.',
|
||||
)
|
||||
g.add_argument(
|
||||
'--key',
|
||||
dest='key',
|
||||
help=argparse.SUPPRESS,
|
||||
type=removed_key_option,
|
||||
)
|
||||
g.add_argument(
|
||||
'--splash',
|
||||
dest='splash',
|
||||
metavar="IMAGE_FILE",
|
||||
help="(EXPERIMENTAL) Add an splash screen with the image IMAGE_FILE to the application. The splash screen can "
|
||||
"display progress updates while unpacking.",
|
||||
)
|
||||
|
||||
g = parser.add_argument_group('How to generate')
|
||||
g.add_argument(
|
||||
"-d",
|
||||
"--debug",
|
||||
# If this option is not specified, then its default value is an empty list (no debug options selected).
|
||||
default=[],
|
||||
# Note that ``nargs`` is omitted. This produces a single item not stored in a list, as opposed to a list
|
||||
# containing one item, as per `nargs <https://docs.python.org/3/library/argparse.html#nargs>`_.
|
||||
nargs=None,
|
||||
# The options specified must come from this list.
|
||||
choices=DEBUG_ALL_CHOICE + DEBUG_ARGUMENT_CHOICES,
|
||||
# Append choice, rather than storing them (which would overwrite any previous selections).
|
||||
action='append',
|
||||
# Allow newlines in the help text; see the ``_SmartFormatter`` in ``__main__.py``.
|
||||
help=(
|
||||
"R|Provide assistance with debugging a frozen\n"
|
||||
"application. This argument may be provided multiple\n"
|
||||
"times to select several of the following options.\n"
|
||||
"\n"
|
||||
"- all: All three of the following options.\n"
|
||||
"\n"
|
||||
"- imports: specify the -v option to the underlying\n"
|
||||
" Python interpreter, causing it to print a message\n"
|
||||
" each time a module is initialized, showing the\n"
|
||||
" place (filename or built-in module) from which it\n"
|
||||
" is loaded. See\n"
|
||||
" https://docs.python.org/3/using/cmdline.html#id4.\n"
|
||||
"\n"
|
||||
"- bootloader: tell the bootloader to issue progress\n"
|
||||
" messages while initializing and starting the\n"
|
||||
" bundled app. Used to diagnose problems with\n"
|
||||
" missing imports.\n"
|
||||
"\n"
|
||||
"- noarchive: instead of storing all frozen Python\n"
|
||||
" source files as an archive inside the resulting\n"
|
||||
" executable, store them as files in the resulting\n"
|
||||
" output directory.\n"
|
||||
"\n"
|
||||
),
|
||||
)
|
||||
g.add_argument(
|
||||
'--optimize',
|
||||
dest='optimize',
|
||||
metavar='LEVEL',
|
||||
type=int,
|
||||
choices={-1, 0, 1, 2},
|
||||
default=None,
|
||||
help='Bytecode optimization level used for collected python modules and scripts. For details, see the section '
|
||||
'“Bytecode Optimization Level” in PyInstaller manual.',
|
||||
)
|
||||
g.add_argument(
|
||||
'--python-option',
|
||||
dest='python_options',
|
||||
metavar='PYTHON_OPTION',
|
||||
action='append',
|
||||
default=[],
|
||||
help='Specify a command-line option to pass to the Python interpreter at runtime. Currently supports '
|
||||
'"v" (equivalent to "--debug imports"), "u", "W <warning control>", "X <xoption>", and "hash_seed=<value>". '
|
||||
'For details, see the section "Specifying Python Interpreter Options" in PyInstaller manual.',
|
||||
)
|
||||
g.add_argument(
|
||||
"-s",
|
||||
"--strip",
|
||||
action="store_true",
|
||||
help="Apply a symbol-table strip to the executable and shared libs (not recommended for Windows)",
|
||||
)
|
||||
g.add_argument(
|
||||
"--noupx",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Do not use UPX even if it is available (works differently between Windows and *nix)",
|
||||
)
|
||||
g.add_argument(
|
||||
"--upx-exclude",
|
||||
dest="upx_exclude",
|
||||
metavar="FILE",
|
||||
action="append",
|
||||
help="Prevent a binary from being compressed when using upx. This is typically used if upx corrupts certain "
|
||||
"binaries during compression. FILE is the filename of the binary without path. This option can be used "
|
||||
"multiple times.",
|
||||
)
|
||||
|
||||
g = parser.add_argument_group('Windows and macOS specific options')
|
||||
g.add_argument(
|
||||
"-c",
|
||||
"--console",
|
||||
"--nowindowed",
|
||||
dest="console",
|
||||
action="store_true",
|
||||
default=None,
|
||||
help="Open a console window for standard i/o (default). On Windows this option has no effect if the first "
|
||||
"script is a '.pyw' file.",
|
||||
)
|
||||
g.add_argument(
|
||||
"-w",
|
||||
"--windowed",
|
||||
"--noconsole",
|
||||
dest="console",
|
||||
action="store_false",
|
||||
default=None,
|
||||
help="Windows and macOS: do not provide a console window for standard i/o. On macOS this also triggers "
|
||||
"building a macOS .app bundle. On Windows this option is automatically set if the first script is a '.pyw' "
|
||||
"file. This option is ignored on *NIX systems.",
|
||||
)
|
||||
g.add_argument(
|
||||
"--hide-console",
|
||||
type=str,
|
||||
choices={'hide-early', 'hide-late', 'minimize-early', 'minimize-late'},
|
||||
default=None,
|
||||
help="Windows only: in console-enabled executable, have bootloader automatically hide or minimize the console "
|
||||
"window if the program owns the console window (i.e., was not launched from an existing console window).",
|
||||
)
|
||||
g.add_argument(
|
||||
"-i",
|
||||
"--icon",
|
||||
action='append',
|
||||
dest="icon_file",
|
||||
metavar='<FILE.ico or FILE.exe,ID or FILE.icns or Image or "NONE">',
|
||||
help="FILE.ico: apply the icon to a Windows executable. FILE.exe,ID: extract the icon with ID from an exe. "
|
||||
"FILE.icns: apply the icon to the .app bundle on macOS. If an image file is entered that isn't in the "
|
||||
"platform format (ico on Windows, icns on Mac), PyInstaller tries to use Pillow to translate the icon into "
|
||||
"the correct format (if Pillow is installed). Use \"NONE\" to not apply any icon, thereby making the OS show "
|
||||
"some default (default: apply PyInstaller's icon). This option can be used multiple times.",
|
||||
)
|
||||
g.add_argument(
|
||||
"--disable-windowed-traceback",
|
||||
dest="disable_windowed_traceback",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Disable traceback dump of unhandled exception in windowed (noconsole) mode (Windows and macOS only), "
|
||||
"and instead display a message that this feature is disabled.",
|
||||
)
|
||||
|
||||
g = parser.add_argument_group('Windows specific options')
|
||||
g.add_argument(
|
||||
"--version-file",
|
||||
dest="version_file",
|
||||
metavar="FILE",
|
||||
help="Add a version resource from FILE to the exe.",
|
||||
)
|
||||
g.add_argument(
|
||||
"--manifest",
|
||||
metavar="<FILE or XML>",
|
||||
help="Add manifest FILE or XML to the exe.",
|
||||
)
|
||||
g.add_argument(
|
||||
"-m",
|
||||
dest="shorthand_manifest",
|
||||
metavar="<FILE or XML>",
|
||||
help="Deprecated shorthand for --manifest.",
|
||||
)
|
||||
g.add_argument(
|
||||
"--no-embed-manifest",
|
||||
action=_RemovedNoEmbedManifestAction,
|
||||
)
|
||||
g.add_argument(
|
||||
"-r",
|
||||
"--resource",
|
||||
dest="resources",
|
||||
metavar="RESOURCE",
|
||||
action="append",
|
||||
default=[],
|
||||
help="Add or update a resource to a Windows executable. The RESOURCE is one to four items, "
|
||||
"FILE[,TYPE[,NAME[,LANGUAGE]]]. FILE can be a data file or an exe/dll. For data files, at least TYPE and NAME "
|
||||
"must be specified. LANGUAGE defaults to 0 or may be specified as wildcard * to update all resources of the "
|
||||
"given TYPE and NAME. For exe/dll files, all resources from FILE will be added/updated to the final executable "
|
||||
"if TYPE, NAME and LANGUAGE are omitted or specified as wildcard *. This option can be used multiple times.",
|
||||
)
|
||||
g.add_argument(
|
||||
'--uac-admin',
|
||||
dest='uac_admin',
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Using this option creates a Manifest that will request elevation upon application start.",
|
||||
)
|
||||
g.add_argument(
|
||||
'--uac-uiaccess',
|
||||
dest='uac_uiaccess',
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Using this option allows an elevated application to work with Remote Desktop.",
|
||||
)
|
||||
|
||||
g = parser.add_argument_group('Windows Side-by-side Assembly searching options (advanced)')
|
||||
g.add_argument(
|
||||
"--win-private-assemblies",
|
||||
action=_RemovedWinPrivateAssembliesAction,
|
||||
)
|
||||
g.add_argument(
|
||||
"--win-no-prefer-redirects",
|
||||
action=_RemovedWinNoPreferRedirectsAction,
|
||||
)
|
||||
|
||||
g = parser.add_argument_group('macOS specific options')
|
||||
g.add_argument(
|
||||
"--argv-emulation",
|
||||
dest="argv_emulation",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Enable argv emulation for macOS app bundles. If enabled, the initial open document/URL event is "
|
||||
"processed by the bootloader and the passed file paths or URLs are appended to sys.argv.",
|
||||
)
|
||||
|
||||
g.add_argument(
|
||||
'--osx-bundle-identifier',
|
||||
dest='bundle_identifier',
|
||||
help="macOS .app bundle identifier is used as the default unique program name for code signing purposes. "
|
||||
"The usual form is a hierarchical name in reverse DNS notation. For example: com.mycompany.department.appname "
|
||||
"(default: first script's basename)",
|
||||
)
|
||||
|
||||
g.add_argument(
|
||||
'--target-architecture',
|
||||
'--target-arch',
|
||||
dest='target_arch',
|
||||
metavar='ARCH',
|
||||
default=None,
|
||||
help="Target architecture (macOS only; valid values: x86_64, arm64, universal2). Enables switching between "
|
||||
"universal2 and single-arch version of frozen application (provided python installation supports the target "
|
||||
"architecture). If not target architecture is not specified, the current running architecture is targeted.",
|
||||
)
|
||||
|
||||
g.add_argument(
|
||||
'--codesign-identity',
|
||||
dest='codesign_identity',
|
||||
metavar='IDENTITY',
|
||||
default=None,
|
||||
help="Code signing identity (macOS only). Use the provided identity to sign collected binaries and generated "
|
||||
"executable. If signing identity is not provided, ad-hoc signing is performed instead.",
|
||||
)
|
||||
|
||||
g.add_argument(
|
||||
'--osx-entitlements-file',
|
||||
dest='entitlements_file',
|
||||
metavar='FILENAME',
|
||||
default=None,
|
||||
help="Entitlements file to use when code-signing the collected binaries (macOS only).",
|
||||
)
|
||||
|
||||
g = parser.add_argument_group('Rarely used special options')
|
||||
g.add_argument(
|
||||
"--runtime-tmpdir",
|
||||
dest="runtime_tmpdir",
|
||||
metavar="PATH",
|
||||
help="Where to extract libraries and support files in `onefile` mode. If this option is given, the bootloader "
|
||||
"will ignore any temp-folder location defined by the run-time OS. The ``_MEIxxxxxx``-folder will be created "
|
||||
"here. Please use this option only if you know what you are doing. Note that on POSIX systems, PyInstaller's "
|
||||
"bootloader does NOT perform shell-style environment variable expansion on the given path string. Therefore, "
|
||||
"using environment variables (e.g., ``~`` or ``$HOME``) in path will NOT work.",
|
||||
)
|
||||
g.add_argument(
|
||||
"--bootloader-ignore-signals",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Tell the bootloader to ignore signals rather than forwarding them to the child process. Useful in "
|
||||
"situations where for example a supervisor process signals both the bootloader and the child (e.g., via a "
|
||||
"process group) to avoid signalling the child twice.",
|
||||
)
|
||||
|
||||
|
||||
def main(
|
||||
scripts,
|
||||
name=None,
|
||||
onefile=False,
|
||||
console=True,
|
||||
debug=[],
|
||||
python_options=[],
|
||||
strip=False,
|
||||
noupx=False,
|
||||
upx_exclude=None,
|
||||
runtime_tmpdir=None,
|
||||
contents_directory=None,
|
||||
pathex=[],
|
||||
version_file=None,
|
||||
specpath=None,
|
||||
bootloader_ignore_signals=False,
|
||||
disable_windowed_traceback=False,
|
||||
datas=[],
|
||||
binaries=[],
|
||||
icon_file=None,
|
||||
manifest=None,
|
||||
resources=[],
|
||||
bundle_identifier=None,
|
||||
hiddenimports=[],
|
||||
hookspath=[],
|
||||
runtime_hooks=[],
|
||||
excludes=[],
|
||||
uac_admin=False,
|
||||
uac_uiaccess=False,
|
||||
collect_submodules=[],
|
||||
collect_binaries=[],
|
||||
collect_data=[],
|
||||
collect_all=[],
|
||||
copy_metadata=[],
|
||||
splash=None,
|
||||
recursive_copy_metadata=[],
|
||||
target_arch=None,
|
||||
codesign_identity=None,
|
||||
entitlements_file=None,
|
||||
argv_emulation=False,
|
||||
hide_console=None,
|
||||
optimize=None,
|
||||
**_kwargs
|
||||
):
|
||||
# Default values for onefile and console when not explicitly specified on command-line (indicated by None)
|
||||
if onefile is None:
|
||||
onefile = False
|
||||
|
||||
if console is None:
|
||||
console = True
|
||||
|
||||
# If appname is not specified - use the basename of the main script as name.
|
||||
if name is None:
|
||||
name = os.path.splitext(os.path.basename(scripts[0]))[0]
|
||||
|
||||
# If specpath not specified - use default value - current working directory.
|
||||
if specpath is None:
|
||||
specpath = DEFAULT_SPECPATH
|
||||
else:
|
||||
# Expand starting tilde into user's home directory, as a work-around for tilde not being expanded by shell when
|
||||
# using `--specpath=~/path/abc` instead of `--specpath ~/path/abc` (or when the path argument is quoted).
|
||||
specpath = os.path.expanduser(specpath)
|
||||
# If cwd is the root directory of PyInstaller, generate the .spec file in ./appname/ subdirectory.
|
||||
if specpath == HOMEPATH:
|
||||
specpath = os.path.join(HOMEPATH, name)
|
||||
# Create directory tree if missing.
|
||||
if not os.path.exists(specpath):
|
||||
os.makedirs(specpath)
|
||||
|
||||
# Handle additional EXE options.
|
||||
exe_options = ''
|
||||
if version_file:
|
||||
exe_options += "\n version='%s'," % escape_win_filepath(version_file)
|
||||
if uac_admin:
|
||||
exe_options += "\n uac_admin=True,"
|
||||
if uac_uiaccess:
|
||||
exe_options += "\n uac_uiaccess=True,"
|
||||
if icon_file:
|
||||
# Icon file for Windows.
|
||||
# On Windows, the default icon is embedded in the bootloader executable.
|
||||
if icon_file[0] == 'NONE':
|
||||
exe_options += "\n icon='NONE',"
|
||||
else:
|
||||
exe_options += "\n icon=[%s]," % ','.join("'%s'" % escape_win_filepath(ic) for ic in icon_file)
|
||||
# Icon file for macOS.
|
||||
# We need to encapsulate it into apostrofes.
|
||||
icon_file = "'%s'" % icon_file[0]
|
||||
else:
|
||||
# On macOS, the default icon has to be copied into the .app bundle.
|
||||
# The the text value 'None' means - use default icon.
|
||||
icon_file = 'None'
|
||||
if contents_directory:
|
||||
exe_options += "\n contents_directory='%s'," % (contents_directory or "_internal")
|
||||
if hide_console:
|
||||
exe_options += "\n hide_console='%s'," % hide_console
|
||||
|
||||
if bundle_identifier:
|
||||
# We need to encapsulate it into apostrofes.
|
||||
bundle_identifier = "'%s'" % bundle_identifier
|
||||
|
||||
if _kwargs["shorthand_manifest"]:
|
||||
manifest = _kwargs["shorthand_manifest"]
|
||||
logger.log(
|
||||
logging.DEPRECATION, "PyInstaller v7 will remove the -m shorthand flag. Please use --manifest=%s instead",
|
||||
manifest
|
||||
)
|
||||
if manifest:
|
||||
if "<" in manifest:
|
||||
# Assume XML string
|
||||
exe_options += "\n manifest='%s'," % manifest.replace("'", "\\'")
|
||||
else:
|
||||
# Assume filename
|
||||
exe_options += "\n manifest='%s'," % escape_win_filepath(manifest)
|
||||
if resources:
|
||||
resources = list(map(escape_win_filepath, resources))
|
||||
exe_options += "\n resources=%s," % repr(resources)
|
||||
|
||||
hiddenimports = hiddenimports or []
|
||||
upx_exclude = upx_exclude or []
|
||||
|
||||
if is_darwin and onefile and not console:
|
||||
from PyInstaller.building.osx import WINDOWED_ONEFILE_DEPRCATION
|
||||
logger.log(logging.DEPRECATION, WINDOWED_ONEFILE_DEPRCATION)
|
||||
|
||||
# If file extension of the first script is '.pyw', force --windowed option.
|
||||
if is_win and os.path.splitext(scripts[0])[-1] == '.pyw':
|
||||
console = False
|
||||
|
||||
# If script paths are relative, make them relative to the directory containing .spec file.
|
||||
scripts = [make_path_spec_relative(x, specpath) for x in scripts]
|
||||
# With absolute paths replace prefix with variable HOMEPATH.
|
||||
scripts = list(map(Path, scripts))
|
||||
|
||||
# Translate the default of ``debug=None`` to an empty list.
|
||||
if debug is None:
|
||||
debug = []
|
||||
# Translate the ``all`` option.
|
||||
if DEBUG_ALL_CHOICE[0] in debug:
|
||||
debug = DEBUG_ARGUMENT_CHOICES
|
||||
|
||||
# Create preamble (for collect_*() calls)
|
||||
preamble = Preamble(
|
||||
datas, binaries, hiddenimports, collect_data, collect_binaries, collect_submodules, collect_all, copy_metadata,
|
||||
recursive_copy_metadata
|
||||
)
|
||||
|
||||
if splash:
|
||||
splash_init = splashtmpl % {'splash_image': splash}
|
||||
splash_binaries = "\n splash.binaries,"
|
||||
splash_target = "\n splash,"
|
||||
else:
|
||||
splash_init = splash_binaries = splash_target = ""
|
||||
|
||||
# Infer byte-code optimization level.
|
||||
opt_level = sum([opt == 'O' for opt in python_options])
|
||||
if opt_level > 2:
|
||||
logger.warning(
|
||||
"The switch '--python-option O' has been specified %d times - it should be specified at most twice!",
|
||||
opt_level,
|
||||
)
|
||||
opt_level = 2
|
||||
|
||||
if optimize is None:
|
||||
if opt_level == 0:
|
||||
# Infer from running python process
|
||||
optimize = sys.flags.optimize
|
||||
else:
|
||||
# Infer from `--python-option O` switch(es).
|
||||
optimize = opt_level
|
||||
elif optimize != opt_level and opt_level != 0:
|
||||
logger.warning(
|
||||
"Mismatch between optimization level passed via --optimize switch (%d) and number of '--python-option O' "
|
||||
"switches (%d)!",
|
||||
optimize,
|
||||
opt_level,
|
||||
)
|
||||
|
||||
if optimize >= 0:
|
||||
# Ensure OPTIONs passed to bootloader match the optimization settings.
|
||||
python_options += max(0, optimize - opt_level) * ['O']
|
||||
|
||||
# Create OPTIONs array
|
||||
if 'imports' in debug and 'v' not in python_options:
|
||||
python_options.append('v')
|
||||
python_options_array = [(opt, None, 'OPTION') for opt in python_options]
|
||||
|
||||
d = {
|
||||
'scripts': scripts,
|
||||
'pathex': pathex or [],
|
||||
'binaries': preamble.binaries,
|
||||
'datas': preamble.datas,
|
||||
'hiddenimports': preamble.hiddenimports,
|
||||
'preamble': preamble.content,
|
||||
'name': name,
|
||||
'noarchive': 'noarchive' in debug,
|
||||
'optimize': optimize,
|
||||
'options': python_options_array,
|
||||
'debug_bootloader': 'bootloader' in debug,
|
||||
'bootloader_ignore_signals': bootloader_ignore_signals,
|
||||
'strip': strip,
|
||||
'upx': not noupx,
|
||||
'upx_exclude': upx_exclude,
|
||||
'runtime_tmpdir': runtime_tmpdir,
|
||||
'exe_options': exe_options,
|
||||
# Directory with additional custom import hooks.
|
||||
'hookspath': hookspath,
|
||||
# List with custom runtime hook files.
|
||||
'runtime_hooks': runtime_hooks or [],
|
||||
# List of modules/packages to ignore.
|
||||
'excludes': excludes or [],
|
||||
# only Windows and macOS distinguish windowed and console apps
|
||||
'console': console,
|
||||
'disable_windowed_traceback': disable_windowed_traceback,
|
||||
# Icon filename. Only macOS uses this item.
|
||||
'icon': icon_file,
|
||||
# .app bundle identifier. Only macOS uses this item.
|
||||
'bundle_identifier': bundle_identifier,
|
||||
# argv emulation (macOS only)
|
||||
'argv_emulation': argv_emulation,
|
||||
# Target architecture (macOS only)
|
||||
'target_arch': target_arch,
|
||||
# Code signing identity (macOS only)
|
||||
'codesign_identity': codesign_identity,
|
||||
# Entitlements file (macOS only)
|
||||
'entitlements_file': entitlements_file,
|
||||
# splash screen
|
||||
'splash_init': splash_init,
|
||||
'splash_target': splash_target,
|
||||
'splash_binaries': splash_binaries,
|
||||
}
|
||||
|
||||
# Write down .spec file to filesystem.
|
||||
specfnm = os.path.join(specpath, name + '.spec')
|
||||
with open(specfnm, 'w', encoding='utf-8') as specfile:
|
||||
if onefile:
|
||||
specfile.write(onefiletmplt % d)
|
||||
# For macOS create .app bundle.
|
||||
if is_darwin and not console:
|
||||
specfile.write(bundleexetmplt % d)
|
||||
else:
|
||||
specfile.write(onedirtmplt % d)
|
||||
# For macOS create .app bundle.
|
||||
if is_darwin and not console:
|
||||
specfile.write(bundletmplt % d)
|
||||
|
||||
return specfnm
|
||||
736
venv/lib/python3.12/site-packages/PyInstaller/building/osx.py
Executable file
736
venv/lib/python3.12/site-packages/PyInstaller/building/osx.py
Executable file
@@ -0,0 +1,736 @@
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (c) 2005-2023, PyInstaller Development Team.
|
||||
#
|
||||
# Distributed under the terms of the GNU General Public License (version 2
|
||||
# or later) with exception for distributing the bootloader.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#
|
||||
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
import os
|
||||
import pathlib
|
||||
import plistlib
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
from PyInstaller import log as logging
|
||||
from PyInstaller.building.api import COLLECT, EXE
|
||||
from PyInstaller.building.datastruct import Target, logger, normalize_toc
|
||||
from PyInstaller.building.utils import _check_path_overlap, _rmtree, process_collected_binary
|
||||
from PyInstaller.compat import is_darwin, strict_collect_mode
|
||||
from PyInstaller.building.icon import normalize_icon_type
|
||||
import PyInstaller.utils.misc as miscutils
|
||||
|
||||
if is_darwin:
|
||||
import PyInstaller.utils.osx as osxutils
|
||||
|
||||
# Character sequence used to replace dot (`.`) in names of directories that are created in `Contents/MacOS` or
|
||||
# `Contents/Frameworks`, where only .framework bundle directories are allowed to have dot in name.
|
||||
DOT_REPLACEMENT = '__dot__'
|
||||
|
||||
WINDOWED_ONEFILE_DEPRCATION = (
|
||||
"Onefile mode in combination with macOS .app bundles (windowed mode) don't make sense (a .app bundle can not be a "
|
||||
"single file) and clashes with macOS's security. Please migrate to onedir mode. This will become an error "
|
||||
"in v7.0."
|
||||
)
|
||||
|
||||
|
||||
class BUNDLE(Target):
|
||||
def __init__(self, *args, **kwargs):
|
||||
from PyInstaller.config import CONF
|
||||
|
||||
for item in args:
|
||||
if isinstance(item, EXE) and not item.exclude_binaries:
|
||||
logger.log(logging.DEPRECATION, WINDOWED_ONEFILE_DEPRCATION)
|
||||
|
||||
# BUNDLE only has a sense under macOS, it is a noop on other platforms.
|
||||
if not is_darwin:
|
||||
return
|
||||
|
||||
# Get a path to a .icns icon for the app bundle.
|
||||
self.icon = kwargs.get('icon')
|
||||
if not self.icon:
|
||||
# --icon not specified; use the default in the pyinstaller folder
|
||||
self.icon = os.path.join(
|
||||
os.path.dirname(os.path.dirname(__file__)), 'bootloader', 'images', 'icon-windowed.icns'
|
||||
)
|
||||
else:
|
||||
# User gave an --icon=path. If it is relative, make it relative to the spec file location.
|
||||
if not os.path.isabs(self.icon):
|
||||
self.icon = os.path.join(CONF['specpath'], self.icon)
|
||||
|
||||
super().__init__()
|
||||
|
||||
# .app bundle is created in DISTPATH.
|
||||
self.name = kwargs.get('name', None)
|
||||
base_name = os.path.basename(self.name)
|
||||
self.name = os.path.join(CONF['distpath'], base_name)
|
||||
|
||||
self.appname = os.path.splitext(base_name)[0]
|
||||
# Ensure version is a string, even if user accidentally passed an int or a float.
|
||||
# Having a `CFBundleShortVersionString` entry of non-string type in `Info.plist` causes the .app bundle to
|
||||
# crash at start (#4466).
|
||||
self.version = str(kwargs.get("version", "0.0.0"))
|
||||
self.toc = []
|
||||
self.strip = False
|
||||
self.upx = False
|
||||
self.console = True
|
||||
self.target_arch = None
|
||||
self.codesign_identity = None
|
||||
self.entitlements_file = None
|
||||
|
||||
# .app bundle identifier for Code Signing
|
||||
self.bundle_identifier = kwargs.get('bundle_identifier')
|
||||
if not self.bundle_identifier:
|
||||
# Fallback to appname.
|
||||
self.bundle_identifier = self.appname
|
||||
|
||||
self.info_plist = kwargs.get('info_plist', None)
|
||||
|
||||
for arg in args:
|
||||
# Valid arguments: EXE object, COLLECT object, and TOC-like iterables
|
||||
if isinstance(arg, EXE):
|
||||
# Add EXE as an entry to the TOC, and merge its dependencies TOC
|
||||
self.toc.append((os.path.basename(arg.name), arg.name, 'EXECUTABLE'))
|
||||
self.toc.extend(arg.dependencies)
|
||||
# Inherit settings
|
||||
self.strip = arg.strip
|
||||
self.upx = arg.upx
|
||||
self.upx_exclude = arg.upx_exclude
|
||||
self.console = arg.console
|
||||
self.target_arch = arg.target_arch
|
||||
self.codesign_identity = arg.codesign_identity
|
||||
self.entitlements_file = arg.entitlements_file
|
||||
elif isinstance(arg, COLLECT):
|
||||
# Merge the TOC
|
||||
self.toc.extend(arg.toc)
|
||||
# Inherit settings
|
||||
self.strip = arg.strip_binaries
|
||||
self.upx = arg.upx_binaries
|
||||
self.upx_exclude = arg.upx_exclude
|
||||
self.console = arg.console
|
||||
self.target_arch = arg.target_arch
|
||||
self.codesign_identity = arg.codesign_identity
|
||||
self.entitlements_file = arg.entitlements_file
|
||||
elif miscutils.is_iterable(arg):
|
||||
# TOC-like iterable
|
||||
self.toc.extend(arg)
|
||||
else:
|
||||
raise TypeError(f"Invalid argument type for BUNDLE: {type(arg)!r}")
|
||||
|
||||
# Infer the executable name from the first EXECUTABLE entry in the TOC; it might have come from the COLLECT
|
||||
# (as opposed to the stand-alone EXE).
|
||||
for dest_name, src_name, typecode in self.toc:
|
||||
if typecode == "EXECUTABLE":
|
||||
self.exename = src_name
|
||||
break
|
||||
else:
|
||||
raise ValueError("No EXECUTABLE entry found in the TOC!")
|
||||
|
||||
# Normalize TOC
|
||||
self.toc = normalize_toc(self.toc)
|
||||
|
||||
self.__postinit__()
|
||||
|
||||
_GUTS = (
|
||||
# BUNDLE always builds, just want the toc to be written out
|
||||
('toc', None),
|
||||
)
|
||||
|
||||
def _check_guts(self, data, last_build):
|
||||
# BUNDLE always needs to be executed, in order to clean the output directory.
|
||||
return True
|
||||
|
||||
# Helper for determining whether the given file belongs to a .framework bundle or not. If it does, it returns
|
||||
# the path to the top-level .framework bundle directory; otherwise, returns None. In case of nested .framework
|
||||
# bundles, the path to the top-most .framework bundle directory is returned.
|
||||
@staticmethod
|
||||
def _is_framework_file(dest_path):
|
||||
# NOTE: reverse the parents list because we are looking for the top-most .framework bundle directory!
|
||||
for parent in reversed(dest_path.parents):
|
||||
if parent.name.endswith('.framework'):
|
||||
return parent
|
||||
return None
|
||||
|
||||
# Helper that computes relative cross-link path between link's location and target, assuming they are both
|
||||
# rooted in the `Contents` directory of a macOS .app bundle.
|
||||
@staticmethod
|
||||
def _compute_relative_crosslink(crosslink_location, crosslink_target):
|
||||
# We could take symlink_location and symlink_target as they are (relative to parent of the `Contents`
|
||||
# directory), but that would introduce an unnecessary `../Contents` part. So instead, we take both paths
|
||||
# relative to the `Contents` directory.
|
||||
return os.path.join(
|
||||
*['..' for level in pathlib.PurePath(crosslink_location).relative_to('Contents').parent.parts],
|
||||
pathlib.PurePath(crosslink_target).relative_to('Contents'),
|
||||
)
|
||||
|
||||
# This method takes the original (input) TOC and processes it into final TOC, based on which the `assemble` method
|
||||
# performs its file collection. The TOC processing here represents the core of our efforts to generate an .app
|
||||
# bundle that is compatible with Apple's code-signing requirements.
|
||||
#
|
||||
# For in-depth details on the code-signing, see Apple's `Technical Note TN2206: macOS Code Signing In Depth` at
|
||||
# https://developer.apple.com/library/archive/technotes/tn2206/_index.html
|
||||
#
|
||||
# The requirements, framed from PyInstaller's perspective, can be summarized as follows:
|
||||
#
|
||||
# 1. The `Contents/MacOS` directory is expected to contain only the program executable and (binary) code (= dylibs
|
||||
# and nested .framework bundles). Alternatively, the dylibs and .framework bundles can be also placed into
|
||||
# `Contents/Frameworks` directory (where same rules apply as for `Contents/MacOS`, so the remainder of this
|
||||
# text refers to the two inter-changeably, unless explicitly noted otherwise). The code in `Contents/MacOS`
|
||||
# is expected to be signed, and the `codesign` utility will recursively sign all found code when using `--deep`
|
||||
# option to sign the .app bundle.
|
||||
#
|
||||
# 2. All non-code files should be placed in `Contents/Resources`, so they become sealed (data) resources;
|
||||
# i.e., their signature data is recorded in `Contents/_CodeSignature/CodeResources`. (As a side note,
|
||||
# it seems that signature information for data/resources in `Contents/Resources` is kept under `file` key in
|
||||
# the `CodeResources` file, while the information for contents in `Contents/MacOS` is kept under `file2` key).
|
||||
#
|
||||
# 3. The directories in `Contents/MacOS` may not contain dots (`.`) in their names, except for the nested
|
||||
# .framework bundle directories. The directories in `Contents/Resources` have no such restrictions.
|
||||
#
|
||||
# 4. There may not be any content in the top level of a bundle. In other words, if a bundle has a `Contents`
|
||||
# or a `Versions` directory at its top level, there may be no other files or directories alongside them. The
|
||||
# sole exception is that alongside `Versions`, there may be symlinks to files and directories in
|
||||
# `Versions/Current`. This rule is important for nested .framework bundles that we collect from python packages.
|
||||
#
|
||||
# Next, let us consider the consequences of violating each of the above requirements:
|
||||
#
|
||||
# 1. Code signing machinery can directly store signature only in Mach-O binaries and nested .framework bundles; if
|
||||
# a data file is placed in `Contents/MacOS`, the signature is stored in the file's extended attributes. If the
|
||||
# extended attributes are lost, the program's signature will be broken. Many file transfer techniques (e.g., a
|
||||
# zip file) do not preserve extended attributes, nor are they preserved when uploading to the Mac App Store.
|
||||
#
|
||||
# 2. Putting code (a dylib or a .framework bundle) into `Contents/Resources` causes it to be treated as a resource;
|
||||
# the outer signature (i.e., of the whole .app bundle) does not know that this nested content is actually a code.
|
||||
# Consequently, signing the bundle with `codesign --deep` will NOT sign binaries placed in the
|
||||
# `Contents/Resources`, which may result in missing signatures when .app bundle is verified for notarization.
|
||||
# This might be worked around by signing each binary separately, and then signing the whole bundle (without the
|
||||
# `--deep` option), but requires the user to keep track of the offending binaries.
|
||||
#
|
||||
# 3. If a directory in `Contents/MacOS` contains a dot in the name, code-signing the bundle fails with
|
||||
# `bundle format unrecognized, invalid, or unsuitable` due to code signing machinery treating directory as a
|
||||
# nested .framework bundle directory.
|
||||
#
|
||||
# 4. If nested .framework bundle is malformed, the signing of the .app bundle might succeed, but subsequent
|
||||
# verification will fail, for example with `embedded framework contains modified or invalid version` (as observed
|
||||
# with .framework bundles shipped by contemporary PyQt/PySide PyPI wheels).
|
||||
#
|
||||
# The above requirements are unfortunately often at odds with the structure of python packages:
|
||||
#
|
||||
# * In general, python packages are mixed-content directories, where binaries and data files may be expected to
|
||||
# be found next to each other.
|
||||
#
|
||||
# For example, `opencv-python` provides a custom loader script that requires the package to be collected in the
|
||||
# source-only form by PyInstaller (i.e., the python modules and scripts collected as source .py files). At the
|
||||
# same time, it expects the .py loader script to be able to find the binary extension next to itself.
|
||||
#
|
||||
# Another example of mixed-mode directories are Qt QML components' sub-directories, which contain both the
|
||||
# component's plugin (a binary) and associated meta files (data files).
|
||||
#
|
||||
# * In python world, the directories often contain dots in their names.
|
||||
#
|
||||
# Dots are often used for private directories containing binaries that are shipped with a package. For example,
|
||||
# `numpy/.dylibs`, `scipy/.dylibs`, etc.
|
||||
#
|
||||
# Qt QML components may also contain a dot in their name; couple of examples from `PySide2` package:
|
||||
# `PySide2/Qt/qml/QtQuick.2`, `PySide2/Qt/qml/QtQuick/Controls.2`, `PySide2/Qt/qml/QtQuick/Particles.2`, etc.
|
||||
#
|
||||
# The packages' metadata directories also invariably contain dots in the name due to version (for example,
|
||||
# `numpy-1.24.3.dist-info`).
|
||||
#
|
||||
# In the light of all above, PyInstaller attempts to strictly place all files to their mandated location
|
||||
# (`Contents/MacOS` or `Contents/Frameworks` vs `Contents/Resources`). To preserve the illusion of mixed-content
|
||||
# directories, the content is cross-linked from one directory to the other. Specifically:
|
||||
#
|
||||
# * All entries with DATA typecode are assumed to be data files, and are always placed in corresponding directory
|
||||
# structure rooted in `Contents/Resources`.
|
||||
#
|
||||
# * All entries with BINARY or EXTENSION typecode are always placed in corresponding directory structure rooted in
|
||||
# `Contents/Frameworks`.
|
||||
#
|
||||
# * All entries with EXECUTABLE are placed in `Contents/MacOS` directory.
|
||||
#
|
||||
# * For the purposes of relocation, nested .framework bundles are treated as a single BINARY entity; i.e., the
|
||||
# whole .bundle directory is placed in corresponding directory structure rooted in `Contents/Frameworks` (even
|
||||
# though some of its contents, such as `Info.plist` file, are actually data files).
|
||||
#
|
||||
# * Top-level data files and binaries are always cross-linked to the other directory. For example, given a data file
|
||||
# `data_file.txt` that was collected into `Contents/Resources`, we create a symbolic link called
|
||||
# `Contents/MacOS/data_file.txt` that points to `../Resources/data_file.txt`.
|
||||
#
|
||||
# * The executable itself, while placed in `Contents/MacOS`, is cross-linked into both `Contents/Frameworks` and
|
||||
# `Contents/Resources`.
|
||||
#
|
||||
# * The stand-alone PKG entries (used with onefile builds that side-load the PKG archive) are treated as data files
|
||||
# and collected into `Contents/Resources`, but cross-linked only into `Contents/MacOS` directory (because they
|
||||
# must appear to be next to the program executable). This is the only entry type that is cross-linked into the
|
||||
# `Contents/MacOS` directory and also the only data-like entry type that is not cross-linked into the
|
||||
# `Contents/Frameworks` directory.
|
||||
#
|
||||
# * For files in sub-directories, the cross-linking behavior depends on the type of directory:
|
||||
#
|
||||
# * A data-only directory is created in directory structure rooted in `Contents/Resources`, and cross-linked
|
||||
# into directory structure rooted in `Contents/Frameworks` at directory level (i.e., we link the whole
|
||||
# directory instead of individual files).
|
||||
#
|
||||
# This largely saves us from having to deal with dots in the names of collected metadata directories, which
|
||||
# are examples of data-only directories.
|
||||
#
|
||||
# * A binary-only directory is created in directory structure rooted in `Contents/Frameworks`, and cross-linked
|
||||
# into `Contents/Resources` at directory level.
|
||||
#
|
||||
# * A mixed-content directory is created in both directory structures. Files are placed into corresponding
|
||||
# directory structure based on their type, and cross-linked into other directory structure at file level.
|
||||
#
|
||||
# * This rule is applied recursively; for example, a data-only sub-directory in a mixed-content directory is
|
||||
# cross-linked at directory level, while adjacent binary and data files are cross-linked at file level.
|
||||
#
|
||||
# * To work around the issue with dots in the names of directories in `Contents/Frameworks` (applicable to
|
||||
# binary-only or mixed-content directories), such directories are created with modified name (the dot replaced
|
||||
# with a pre-defined pattern). Next to the modified directory, a symbolic link with original name is created,
|
||||
# pointing to the directory with modified name. With mixed-content directories, this modification is performed
|
||||
# only on the `Contents/Frameworks` side; the corresponding directory in `Contents/Resources` can be created
|
||||
# directly, without name modification and symbolic link.
|
||||
#
|
||||
# * If a symbolic link needs to be created in a mixed-content directory due to a SYMLINK entry from the original
|
||||
# TOC (i.e., a "collected" symlink originating from analysis, as opposed to the cross-linking mechanism described
|
||||
# above), the link is created in both directory structures, each pointing to the resource in its corresponding
|
||||
# directory structure (with one such resource being an actual file, and the other being a cross-link to the file).
|
||||
#
|
||||
# Final remarks:
|
||||
#
|
||||
# NOTE: the relocation mechanism is codified by tests in `tests/functional/test_macos_bundle_structure.py`.
|
||||
#
|
||||
# NOTE: by placing binaries and nested .framework entries into `Contents/Frameworks` instead of `Contents/MacOS`,
|
||||
# we have effectively relocated the `sys._MEIPASS` directory from the `Contents/MacOS` (= the parent directory of
|
||||
# the program executable) into `Contents/Frameworks`. This requires the PyInstaller's bootloader to detect that it
|
||||
# is running in the app-bundle mode (e.g., by checking if program executable's parent directory is `Contents/MacOS`)
|
||||
# and adjust the path accordingly.
|
||||
#
|
||||
# NOTE: the implemented relocation mechanism depends on the input TOC containing properly classified entries
|
||||
# w.r.t. BINARY vs DATA. So hooks and .spec files triggering collection of binaries as datas (and vice versa) will
|
||||
# result in incorrect placement of those files in the generated .app bundle. However, this is *not* the proper place
|
||||
# to address such issues; if necessary, automatic (re)classification should be added to analysis process, to ensure
|
||||
# that BUNDLE (as well as other build targets) receive correctly classified TOC.
|
||||
#
|
||||
# NOTE: similar to the previous note, the relocation mechanism is also not the proper place to enforce compliant
|
||||
# structure of the nested .framework bundles. Instead, this is handled by the analysis process, using the
|
||||
# `PyInstaller.utils.osx.collect_files_from_framework_bundles` helper function. So the input TOC that BUNDLE
|
||||
# receives should already contain entries that reconstruct compliant nested .framework bundles.
|
||||
def _process_bundle_toc(self, toc):
    """
    Process the original (input) TOC into the final, BUNDLE-compatible TOC that `assemble` collects from.

    Entries are relocated into `Contents/MacOS` (program executable), `Contents/Frameworks` (binaries,
    extensions, and .framework bundles), or `Contents/Resources` (data files), and SYMLINK entries are
    generated to cross-link contents between the latter two directory trees, preserving the illusion of
    mixed-content directories expected by python packages. See the extensive comment block preceding this
    method for the code-signing requirements that mandate this layout.

    Returns a normalized and sorted TOC (list of `(dest_name, src_name, typecode)` tuples).
    """
    bundle_toc = []

    # Step 1: inspect the directory layout and classify the directories according to their contents.
    directory_types = dict()

    # Directory classification values. A directory's classification may be promoted to MIXED-DIR as entries
    # of differing types are encountered during the scan.
    _MIXED_DIR_TYPE = 'MIXED-DIR'
    _DATA_DIR_TYPE = 'DATA-DIR'
    _BINARY_DIR_TYPE = 'BINARY-DIR'
    _FRAMEWORK_DIR_TYPE = 'FRAMEWORK-DIR'

    _TOP_LEVEL_DIR = pathlib.PurePath('.')

    for dest_name, src_name, typecode in toc:
        dest_path = pathlib.PurePath(dest_name)

        framework_dir = self._is_framework_file(dest_path)
        if framework_dir:
            # Mark the framework directory as FRAMEWORK-DIR.
            directory_types[framework_dir] = _FRAMEWORK_DIR_TYPE
            # Treat the framework directory as BINARY file when classifying parent directories.
            typecode = 'BINARY'
            parent_dirs = framework_dir.parents
        else:
            parent_dirs = dest_path.parents
            # Treat BINARY and EXTENSION as BINARY to simplify further processing.
            if typecode == 'EXTENSION':
                typecode = 'BINARY'

        # (Re)classify parent directories.
        for parent_dir in parent_dirs:
            # Skip the top-level `.` dir. This is also the only directory that can contain EXECUTABLE and PKG
            # entries, so we do not have to worry about them here.
            if parent_dir == _TOP_LEVEL_DIR:
                continue

            directory_type = _BINARY_DIR_TYPE if typecode == 'BINARY' else _DATA_DIR_TYPE  # default
            directory_type = directory_types.get(parent_dir, directory_type)

            # Promote to mixed-content directory when the entry's type disagrees with the directory's
            # current classification.
            if directory_type == _DATA_DIR_TYPE and typecode == 'BINARY':
                directory_type = _MIXED_DIR_TYPE
            if directory_type == _BINARY_DIR_TYPE and typecode == 'DATA':
                directory_type = _MIXED_DIR_TYPE

            directory_types[parent_dir] = directory_type

    logger.debug("Directory classification: %r", directory_types)

    # Step 2: process the obtained directory structure and create symlink entries for directories that need to be
    # cross-linked. Such directories are data-only and binary-only directories (and framework directories) that are
    # located either in the top-level directory (have no parent) or in a mixed-content directory.
    for directory_path, directory_type in directory_types.items():
        # Cross-linking at directory level applies only to data-only and binary-only directories (as well as
        # framework directories).
        if directory_type == _MIXED_DIR_TYPE:
            continue

        # The parent needs to be either top-level directory or a mixed-content directory. Otherwise, the parent
        # (or one of its ancestors) will get cross-linked, and we do not need the link here.
        parent_dir = directory_path.parent
        requires_crosslink = parent_dir == _TOP_LEVEL_DIR or directory_types.get(parent_dir) == _MIXED_DIR_TYPE
        if not requires_crosslink:
            continue

        logger.debug("Cross-linking directory %r of type %r", directory_path, directory_type)

        # Data-only directories are created in `Contents/Resources`, needs to be cross-linked into `Contents/MacOS`.
        # Vice versa for binary-only or framework directories. The directory creation is handled implicitly, when we
        # create parent directory structure for collected files.
        if directory_type == _DATA_DIR_TYPE:
            symlink_src = os.path.join('Contents/Resources', directory_path)
            symlink_dest = os.path.join('Contents/Frameworks', directory_path)
        else:
            symlink_src = os.path.join('Contents/Frameworks', directory_path)
            symlink_dest = os.path.join('Contents/Resources', directory_path)
        symlink_ref = self._compute_relative_crosslink(symlink_dest, symlink_src)

        bundle_toc.append((symlink_dest, symlink_ref, 'SYMLINK'))

    # Step 3: first part of the work-around for directories that are located in `Contents/Frameworks` but contain a
    # dot in their name. As per `codesign` rules, the only directories in `Contents/Frameworks` that are allowed to
    # contain a dot in their name are .framework bundle directories. So we replace the dot with a custom character
    # sequence (stored in global `DOT_REPLACEMENT` variable), and create a symbolic link with the original name
    # pointing to the modified name. This is the best we can do with code-sign requirements vs. the python
    # community placing their packages' dylibs into `.dylibs` sub-directories, or Qt storing their Qml components
    # in directories named `QtQuick.2`, `QtQuick/Controls.2`, `QtQuick/Particles.2`, `QtQuick/Templates.2`, etc.
    #
    # In this step, we only prepare symlink entries that link the original directory name (with dot) to the modified
    # one (with dot replaced). The parent paths for collected files are modified in later step(s).
    for directory_path, directory_type in directory_types.items():
        # .framework bundle directories contain a dot in the name, but are allowed that.
        if directory_type == _FRAMEWORK_DIR_TYPE:
            continue

        # Data-only directories are fully located in `Contents/Resources` and cross-linked to `Contents/Frameworks`
        # at directory level, so they are also allowed a dot in their name.
        if directory_type == _DATA_DIR_TYPE:
            continue

        # Apply the work-around, if necessary...
        if '.' not in directory_path.name:
            continue

        logger.debug(
            "Creating symlink to work around the dot in the name of directory %r (%s)...", str(directory_path),
            directory_type
        )

        # Create a SYMLINK entry, but only for this level. In case of nested directories with dots in names, the
        # symlinks for ancestors will be created by corresponding loop iteration.
        bundle_toc.append((
            os.path.join('Contents/Frameworks', directory_path),
            directory_path.name.replace('.', DOT_REPLACEMENT),
            'SYMLINK',
        ))

    # Step 4: process the entries for collected files, and decide whether they should be placed into
    # `Contents/MacOS`, `Contents/Frameworks`, or `Contents/Resources`, and whether they should be cross-linked into
    # other directories.
    for orig_dest_name, src_name, typecode in toc:
        orig_dest_path = pathlib.PurePath(orig_dest_name)

        # Special handling for EXECUTABLE and PKG entries
        if typecode == 'EXECUTABLE':
            # Place into `Contents/MacOS`, ...
            file_dest = os.path.join('Contents/MacOS', orig_dest_name)
            bundle_toc.append((file_dest, src_name, typecode))
            # ... and do nothing else. We explicitly avoid cross-linking the executable to `Contents/Frameworks` and
            # `Contents/Resources`, because it should be not necessary (the executable's location should be
            # discovered via `sys.executable`) and to prevent issues when executable name collides with name of a
            # package from which we collect either binaries or data files (or both); see #7314.
            continue
        elif typecode == 'PKG':
            # Place into `Contents/Resources` ...
            file_dest = os.path.join('Contents/Resources', orig_dest_name)
            bundle_toc.append((file_dest, src_name, typecode))
            # ... and cross-link only into `Contents/MacOS`.
            # This is used only in `onefile` mode, where there is actually no other content to distribute among the
            # `Contents/Resources` and `Contents/Frameworks` directories, so cross-linking into the latter makes
            # little sense.
            symlink_dest = os.path.join('Contents/MacOS', orig_dest_name)
            symlink_ref = self._compute_relative_crosslink(symlink_dest, file_dest)
            bundle_toc.append((symlink_dest, symlink_ref, 'SYMLINK'))
            continue

        # Standard data vs binary processing...

        # Determine file location based on its type.
        if self._is_framework_file(orig_dest_path):
            # File from a framework bundle; put into `Contents/Frameworks`, but never cross-link the file itself.
            # The whole .framework bundle directory will be linked as necessary by the directory cross-linking
            # mechanism.
            file_base_dir = 'Contents/Frameworks'
            crosslink_base_dir = None
        elif typecode == 'DATA':
            # Data file; relocate to `Contents/Resources` and cross-link it back into `Contents/Frameworks`.
            file_base_dir = 'Contents/Resources'
            crosslink_base_dir = 'Contents/Frameworks'
        else:
            # Binary; put into `Contents/Frameworks` and cross-link it into `Contents/Resources`.
            file_base_dir = 'Contents/Frameworks'
            crosslink_base_dir = 'Contents/Resources'

        # Determine if we need to cross-link the file. We need to do this for top-level files (the ones without
        # parent directories), and for files whose parent directories are mixed-content directories.
        requires_crosslink = False
        if crosslink_base_dir is not None:
            parent_dir = orig_dest_path.parent
            requires_crosslink = parent_dir == _TOP_LEVEL_DIR or directory_types.get(parent_dir) == _MIXED_DIR_TYPE

        # Special handling for SYMLINK entries in original TOC; if we need to cross-link a symlink entry, we create
        # it in both locations, and have each point to the (relative) resource in the same directory (so one of the
        # targets will likely be a file, and the other will be a symlink due to cross-linking).
        if typecode == 'SYMLINK' and requires_crosslink:
            bundle_toc.append((os.path.join(file_base_dir, orig_dest_name), src_name, typecode))
            bundle_toc.append((os.path.join(crosslink_base_dir, orig_dest_name), src_name, typecode))
            continue

        # The file itself.
        file_dest = os.path.join(file_base_dir, orig_dest_name)
        bundle_toc.append((file_dest, src_name, typecode))

        # Symlink for cross-linking
        if requires_crosslink:
            symlink_dest = os.path.join(crosslink_base_dir, orig_dest_name)
            symlink_ref = self._compute_relative_crosslink(symlink_dest, file_dest)
            bundle_toc.append((symlink_dest, symlink_ref, 'SYMLINK'))

    # Step 5: sanitize all destination paths in the new TOC, to ensure that paths that are rooted in
    # `Contents/Frameworks` do not contain directories with dots in their names. Doing this as a post-processing
    # step keeps code simple and clean and ensures that this step is applied to files, symlinks that originate from
    # cross-linking files, and symlinks that originate from cross-linking directories. This in turn ensures that
    # all directory hierarchies created during the actual file collection have sanitized names, and that collection
    # outcome does not depend on the order of entries in the TOC.
    sanitized_toc = []
    for dest_name, src_name, typecode in bundle_toc:
        dest_path = pathlib.PurePath(dest_name)

        # Paths rooted in Contents/Resources do not require sanitizing.
        if dest_path.parts[0] == 'Contents' and dest_path.parts[1] == 'Resources':
            sanitized_toc.append((dest_name, src_name, typecode))
            continue

        # Special handling for files from .framework bundle directories; sanitize only parent path of the .framework
        # directory (the bundle's own name, and paths within it, legitimately contain dots).
        framework_path = self._is_framework_file(dest_path)
        if framework_path:
            parent_path = framework_path.parent
            remaining_path = dest_path.relative_to(parent_path)
        else:
            parent_path = dest_path.parent
            remaining_path = dest_path.name

        sanitized_dest_path = pathlib.PurePath(
            *parent_path.parts[:2],  # Contents/Frameworks
            *[part.replace('.', DOT_REPLACEMENT) for part in parent_path.parts[2:]],
            remaining_path,
        )
        sanitized_dest_name = str(sanitized_dest_path)

        if sanitized_dest_path != dest_path:
            logger.debug("Sanitizing dest path: %r -> %r", dest_name, sanitized_dest_name)

        sanitized_toc.append((sanitized_dest_name, src_name, typecode))

    bundle_toc = sanitized_toc

    # Normalize and sort the TOC for easier inspection
    bundle_toc = sorted(normalize_toc(bundle_toc))

    return bundle_toc
|
||||
|
||||
def assemble(self):
    """
    Build the .app bundle.

    (Re)creates the output directory with the minimal `Contents` skeleton (`MacOS`, `Resources`,
    `Frameworks`), normalizes and copies the icon, writes `Info.plist`, collects all TOC entries (as
    pre-processed by `_process_bundle_toc`) into their final locations, and finally code-signs the bundle.

    Raises SystemExit when a destination directory cannot be created because a file occupies its path, and
    ValueError on invalid or duplicated source files.
    """
    from PyInstaller.config import CONF

    # Remove a stale output directory from a previous build, if present.
    if _check_path_overlap(self.name) and os.path.isdir(self.name):
        _rmtree(self.name)

    logger.info("Building BUNDLE %s", self.tocbasename)

    # Create a minimal Mac bundle structure.
    os.makedirs(os.path.join(self.name, "Contents", "MacOS"))
    os.makedirs(os.path.join(self.name, "Contents", "Resources"))
    os.makedirs(os.path.join(self.name, "Contents", "Frameworks"))

    # Makes sure the icon exists and attempts to convert to the proper format (.icns) if applicable.
    self.icon = normalize_icon_type(self.icon, ("icns",), "icns", CONF["workpath"])

    # Ensure icon path is absolute.
    self.icon = os.path.abspath(self.icon)

    # Copy icns icon to Resources directory.
    shutil.copyfile(self.icon, os.path.join(self.name, 'Contents', 'Resources', os.path.basename(self.icon)))

    # Key/values for a minimal Info.plist file
    info_plist_dict = {
        "CFBundleDisplayName": self.appname,
        "CFBundleName": self.appname,

        # Required by 'codesign' utility.
        # The value for CFBundleIdentifier is used as the default unique name of your program for Code Signing
        # purposes. It even identifies the APP for access to restricted macOS areas like Keychain.
        #
        # The identifier used for signing must be globally unique. The usual form for this identifier is a
        # hierarchical name in reverse DNS notation, starting with the toplevel domain, followed by the company
        # name, followed by the department within the company, and ending with the product name. Usually in the
        # form: com.mycompany.department.appname
        # CLI option --osx-bundle-identifier sets this value.
        "CFBundleIdentifier": self.bundle_identifier,
        "CFBundleExecutable": os.path.basename(self.exename),
        "CFBundleIconFile": os.path.basename(self.icon),
        "CFBundleInfoDictionaryVersion": "6.0",
        "CFBundlePackageType": "APPL",
        "CFBundleShortVersionString": self.version,
    }

    # Set some default values. But they still can be overwritten by the user.
    if self.console:
        # Setting EXE console=True implies LSBackgroundOnly=True.
        info_plist_dict['LSBackgroundOnly'] = True
    else:
        # Let's use high resolution by default.
        info_plist_dict['NSHighResolutionCapable'] = True

    # Merge info_plist settings from spec file; user-provided keys override the defaults above.
    if isinstance(self.info_plist, dict) and self.info_plist:
        info_plist_dict.update(self.info_plist)

    plist_filename = os.path.join(self.name, "Contents", "Info.plist")
    with open(plist_filename, "wb") as plist_fh:
        plistlib.dump(info_plist_dict, plist_fh)

    # Pre-process the TOC into its final BUNDLE-compatible form.
    bundle_toc = self._process_bundle_toc(self.toc)

    # Perform the actual collection.
    CONTENTS_FRAMEWORKS_PATH = pathlib.PurePath('Contents/Frameworks')
    for dest_name, src_name, typecode in bundle_toc:
        # Create parent directory structure, if necessary
        dest_path = os.path.join(self.name, dest_name)  # Absolute destination path
        dest_dir = os.path.dirname(dest_path)
        try:
            os.makedirs(dest_dir, exist_ok=True)
        except FileExistsError:
            # With exist_ok=True, this is raised only when the path exists but is not a directory.
            raise SystemExit(
                f"ERROR: Pyinstaller needs to create a directory at {dest_dir!r}, "
                "but there already exists a file at that path!"
            )
        # Copy extensions and binaries from cache. This ensures that these files undergo additional binary
        # processing - have paths to linked libraries rewritten (relative to `@rpath`) and have rpath set to the
        # top-level directory (relative to `@loader_path`, i.e., the file's location). The "top-level" directory
        # in this case corresponds to `Contents/MacOS` (where `sys._MEIPASS` also points), so we need to pass
        # the cache retrieval function the *original* destination path (which is without preceding
        # `Contents/MacOS`).
        if typecode in ('EXTENSION', 'BINARY'):
            orig_dest_name = str(pathlib.PurePath(dest_name).relative_to(CONTENTS_FRAMEWORKS_PATH))
            src_name = process_collected_binary(
                src_name,
                orig_dest_name,
                use_strip=self.strip,
                use_upx=self.upx,
                upx_exclude=self.upx_exclude,
                target_arch=self.target_arch,
                codesign_identity=self.codesign_identity,
                entitlements_file=self.entitlements_file,
                strict_arch_validation=(typecode == 'EXTENSION'),
            )
        if typecode == 'SYMLINK':
            os.symlink(src_name, dest_path)  # Create link at dest_path, pointing at (relative) src_name
        else:
            # BUNDLE does not support MERGE-based multipackage
            assert typecode != 'DEPENDENCY', "MERGE DEPENDENCY entries are not supported in BUNDLE!"

            # At this point, `src_name` should be a valid file.
            if not os.path.isfile(src_name):
                raise ValueError(f"Resource {src_name!r} is not a valid file!")
            # If strict collection mode is enabled, the destination should not exist yet.
            # NOTE(review): `strict_collect_mode` is a module-level flag defined elsewhere in this module —
            # presumably toggled via an environment variable; confirm against the full source.
            if strict_collect_mode and os.path.exists(dest_path):
                raise ValueError(
                    f"Attempting to collect a duplicated file into BUNDLE: {dest_name} (type: {typecode})"
                )
            # Use `shutil.copyfile` to copy file with default permissions. We do not attempt to preserve original
            # permissions nor metadata, as they might be too restrictive and cause issues either during subsequent
            # re-build attempts or when trying to move the application bundle. For binaries (and data files with
            # executable bit set), we manually set the executable bits after copying the file.
            shutil.copyfile(src_name, dest_path)
            if (
                typecode in ('EXTENSION', 'BINARY', 'EXECUTABLE')
                or (typecode == 'DATA' and os.access(src_name, os.X_OK))
            ):
                os.chmod(dest_path, 0o755)

    # Sign the bundle
    logger.info('Signing the BUNDLE...')
    try:
        osxutils.sign_binary(self.name, self.codesign_identity, self.entitlements_file, deep=True)
    except Exception as e:
        # Display a warning or re-raise the error, depending on the environment-variable setting.
        if os.environ.get("PYINSTALLER_STRICT_BUNDLE_CODESIGN_ERROR", "0") == "0":
            logger.warning("Error while signing the bundle: %s", e)
            logger.warning("You will need to sign the bundle manually!")
        else:
            raise RuntimeError("Failed to codesign the bundle!") from e

    logger.info("Building BUNDLE %s completed successfully.", self.tocbasename)

    # Optionally verify bundle's signature. This is primarily intended for our CI.
    if os.environ.get("PYINSTALLER_VERIFY_BUNDLE_SIGNATURE", "0") != "0":
        logger.info("Verifying signature for BUNDLE %s...", self.name)
        self.verify_bundle_signature(self.name)
        logger.info("BUNDLE verification complete!")
|
||||
|
||||
@staticmethod
def verify_bundle_signature(bundle_dir):
    """
    Verify the code signature of the given .app bundle.

    First runs Apple's ``codesign --verify`` on the bundle, and then - if the ``xattr`` package is available -
    checks that no file inside the bundle carries code-signing data in its extended attributes.

    Raises SystemError if codesign rejects the bundle, and ValueError if code-signing data is found in a file's
    extended attributes.
    """
    # Step 1: have codesign validate the bundle's signature across all architectures.
    cmd_args = ['/usr/bin/codesign', '--verify', '--all-architectures', '--deep', '--strict', bundle_dir]
    p = subprocess.run(cmd_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, encoding='utf8')
    if p.returncode:
        raise SystemError(
            f"codesign command ({cmd_args}) failed with error code {p.returncode}!\noutput: {p.stdout}"
        )

    # Step 2: ensure that code-signing information is *NOT* embedded in the files' extended attributes.
    #
    # For non-binary files placed in `Contents/MacOS` or `Contents/Frameworks`, the signature cannot be embedded
    # within the file itself (contrary to binaries, which carry an `LC_CODE_SIGNATURE` section in their header),
    # so it ends up stored in the file's extended attributes. If such a bundle is transferred via a method that
    # does not preserve extended attributes (for example, a zip file), those signatures are lost and the bundle's
    # signature as a whole becomes invalid. This is the primary reason non-binaries are relocated into
    # `Contents/Resources` - signatures for files there end up recorded in
    # `Contents/_CodeSignature/CodeResources` instead.
    #
    # This check therefore verifies that all files have been relocated to locations that satisfy the
    # code-signing requirements.
    try:
        import xattr
    except ModuleNotFoundError:
        logger.info("xattr package not available; skipping verification of extended attributes!")
        return

    CODESIGN_ATTRS = (
        "com.apple.cs.CodeDirectory",
        "com.apple.cs.CodeRequirements",
        "com.apple.cs.CodeRequirements-1",
        "com.apple.cs.CodeSignature",
    )

    for item in pathlib.Path(bundle_dir).rglob("*"):
        if not item.is_file():
            continue

        attrs = xattr.listxattr(item)
        if any(codesign_attr in attrs for codesign_attr in CODESIGN_ATTRS):
            raise ValueError(f"Code-sign attributes found in extended attributes of {str(item)!r}!")
|
||||
476
venv/lib/python3.12/site-packages/PyInstaller/building/splash.py
Executable file
476
venv/lib/python3.12/site-packages/PyInstaller/building/splash.py
Executable file
@@ -0,0 +1,476 @@
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2005-2023, PyInstaller Development Team.
|
||||
#
|
||||
# Distributed under the terms of the GNU General Public License (version 2
|
||||
# or later) with exception for distributing the bootloader.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#
|
||||
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
|
||||
# -----------------------------------------------------------------------------
|
||||
import io
|
||||
import os
|
||||
import re
|
||||
import struct
|
||||
import pathlib
|
||||
|
||||
from PyInstaller import log as logging
|
||||
from PyInstaller.archive.writers import SplashWriter
|
||||
from PyInstaller.building import splash_templates
|
||||
from PyInstaller.building.datastruct import Target
|
||||
from PyInstaller.building.utils import _check_guts_eq, _check_guts_toc, misc
|
||||
from PyInstaller.compat import is_aix, is_darwin
|
||||
from PyInstaller.depend import bindepend
|
||||
from PyInstaller.utils.hooks.tcl_tk import tcltk_info
|
||||
|
||||
try:
|
||||
from PIL import Image as PILImage
|
||||
except ImportError:
|
||||
PILImage = None
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# These requirement files are checked against the current splash screen script. If you wish to modify the splash screen
# and run into tcl errors/bad behavior, this is a good place to start and add components your implementation of the
# splash screen might use.
# NOTE: these paths use the *destination* layout for Tcl/Tk scripts, which uses unversioned tcl and tk directories
# (see `PyInstaller.utils.hooks.tcl_tk.collect_tcl_tk_files`).
splash_requirements = [
    os.path.join(tcltk_info.TK_ROOTNAME, *path_parts) for path_parts in (
        # prepended tcl/tk binaries
        ("license.terms",),
        ("text.tcl",),
        ("tk.tcl",),
        # Used for customizable font
        ("ttk", "ttk.tcl"),
        ("ttk", "fonts.tcl"),
        ("ttk", "cursors.tcl"),
        ("ttk", "utils.tcl"),
    )
]
|
||||
|
||||
|
||||
class Splash(Target):
    """
    Bundles the required resources for the splash screen into a file, which will be included in the CArchive.

    A Splash has two outputs, one is itself and one is stored in splash.binaries. Both need to be passed to other
    build targets in order to enable the splash screen.
    """
    def __init__(self, image_file, binaries, datas, **kwargs):
        """
        :param str image_file:
            A path-like object to the image to be used. Only the PNG file format is supported.

            .. note:: If a different file format is supplied and PIL (Pillow) is installed, the file will be converted
                automatically.

            .. note:: *Windows*: The color ``'magenta'`` / ``'#ff00ff'`` must not be used in the image or text, as it is
                used by splash screen to indicate transparent areas. Use a similar color (e.g., ``'#ff00fe'``) instead.

            .. note:: If PIL (Pillow) is installed and the image is bigger than max_img_size, the image will be resized
                to fit into the specified area.
        :param list binaries:
            The TOC list of binaries the Analysis build target found. This TOC includes all extension modules and their
            binary dependencies. This is required to determine whether the user's program uses `tkinter`.
        :param list datas:
            The TOC list of data the Analysis build target found. This TOC includes all data-file dependencies of the
            modules. This is required to check if all splash screen requirements can be bundled.

        :keyword text_pos:
            An optional two-integer tuple that represents the origin of the text on the splash screen image. The
            origin of the text is its lower left corner. A unit in the respective coordinate system is a pixel of the
            image, its origin lies in the top left corner of the image. This parameter also acts like a switch for
            the text feature. If omitted, no text will be displayed on the splash screen. This text will be used to
            show textual progress in onefile mode.
        :type text_pos: Tuple[int, int]
        :keyword text_size:
            The desired size of the font. If the size argument is a positive number, it is interpreted as a size in
            points. If size is a negative number, its absolute value is interpreted as a size in pixels. Default: ``12``
        :type text_size: int
        :keyword text_font:
            An optional name of a font for the text. This font must be installed on the user system, otherwise the
            system default font is used. If this parameter is omitted, the default font is also used.
        :keyword text_color:
            An optional color for the text. HTML color codes (``'#40e0d0'``) and color names (``'turquoise'``) are
            supported. Default: ``'black'``
            (Windows: the color ``'magenta'`` / ``'#ff00ff'`` is used to indicate transparency, and should not be used)
        :type text_color: str
        :keyword text_default:
            The default text which will be displayed before the extraction starts. Default: ``"Initializing"``
        :type text_default: str
        :keyword full_tk:
            By default Splash bundles only the necessary files for the splash screen (some tk components). This
            options enables adding full tk and making it a requirement, meaning all tk files will be unpacked before
            the splash screen can be started. This is useful during development of the splash screen script.
            Default: ``False``
        :type full_tk: bool
        :keyword minify_script:
            The splash screen is created by executing an Tcl/Tk script. This option enables minimizing the script,
            meaning removing all non essential parts from the script. Default: ``True``
        :keyword name:
            An optional alternative filename for the .res file. If not specified, a name is generated.
        :type name: str
        :keyword script_name:
            An optional alternative filename for the Tcl script, that will be generated. If not specified, a name is
            generated.
        :type script_name: str
        :keyword max_img_size:
            Maximum size of the splash screen image as a tuple. If the supplied image exceeds this limit, it will be
            resized to fit the maximum width (to keep the original aspect ratio). This option can be disabled by
            setting it to None. Default: ``(760, 480)``
        :type max_img_size: Tuple[int, int]
        :keyword always_on_top:
            Force the splashscreen to be always on top of other windows. If disabled, other windows (e.g., from other
            applications) can cover the splash screen by user bringing them to front. This might be useful for
            frozen applications with long startup times. Default: ``True``
        :type always_on_top: bool
        """
        from ..config import CONF
        Target.__init__(self)

        # Splash screen is not supported on macOS. It operates in a secondary thread and macOS disallows UI operations
        # in any thread other than main.
        if is_darwin:
            raise SystemExit("ERROR: Splash screen is not supported on macOS.")

        # Ensure tkinter (and thus Tcl/Tk) is available.
        if not tcltk_info.available:
            raise SystemExit(
                "ERROR: Your platform does not support the splash screen feature, since tkinter is not installed. "
                "Please install tkinter and try again."
            )

        # Check if the Tcl/Tk version is supported.
        logger.info("Verifying Tcl/Tk compatibility with splash screen requirements")
        self._check_tcl_tk_compatibility()

        # Make image path relative to .spec file
        if not os.path.isabs(image_file):
            image_file = os.path.join(CONF['specpath'], image_file)
        image_file = os.path.normpath(image_file)
        if not os.path.exists(image_file):
            raise ValueError("Image file '%s' not found" % image_file)

        # Copy all arguments
        self.image_file = image_file
        self.full_tk = kwargs.get("full_tk", False)
        self.name = kwargs.get("name", None)
        self.script_name = kwargs.get("script_name", None)
        self.minify_script = kwargs.get("minify_script", True)
        self.max_img_size = kwargs.get("max_img_size", (760, 480))

        # text options
        self.text_pos = kwargs.get("text_pos", None)
        self.text_size = kwargs.get("text_size", 12)
        self.text_font = kwargs.get("text_font", "TkDefaultFont")
        self.text_color = kwargs.get("text_color", "black")
        self.text_default = kwargs.get("text_default", "Initializing")

        # always-on-top behavior
        self.always_on_top = kwargs.get("always_on_top", True)

        # Save the generated file separately so that it is not necessary to generate the data again and again
        root = os.path.splitext(self.tocfilename)[0]
        if self.name is None:
            self.name = root + '.res'
        if self.script_name is None:
            self.script_name = root + '_script.tcl'

        # Internal variables
        # Store path to _tkinter extension module, so that guts check can detect if the path changed for some reason.
        self._tkinter_file = tcltk_info.tkinter_extension_file

        # Calculated / analysed values
        self.uses_tkinter = self._uses_tkinter(self._tkinter_file, binaries)
        logger.debug("Program uses tkinter: %r", self.uses_tkinter)
        self.script = self.generate_script()
        self.tcl_lib = tcltk_info.tcl_shared_library  # full path to shared library
        self.tk_lib = tcltk_info.tk_shared_library

        assert self.tcl_lib is not None
        assert self.tk_lib is not None

        logger.debug("Using Tcl shared library: %r", self.tcl_lib)
        logger.debug("Using Tk shared library: %r", self.tk_lib)

        self.splash_requirements = set([
            # NOTE: the implicit assumption here is that Tcl and Tk shared library are collected into top-level
            # application directory, which, at the moment, is true in practically all cases.
            os.path.basename(self.tcl_lib),
            os.path.basename(self.tk_lib),
            *splash_requirements,
        ])

        logger.info("Collect Tcl/Tk data files for the splash screen")
        tcltk_tree = tcltk_info.data_files  # 3-element tuple TOC
        if self.full_tk:
            # The user wants a full copy of Tk, so make all Tk files a requirement.
            self.splash_requirements.update(entry[0] for entry in tcltk_tree)

        # Scan for binary dependencies of the Tcl/Tk shared libraries, and add them to `binaries` TOC list (which
        # should really be called `dependencies` as it is not limited to binaries. But it is too late now, and
        # existing spec files depend on this naming). We specify these binary dependencies (which include the
        # Tcl and Tk shared libraries themselves) even if the user's program uses tkinter and they would be collected
        # anyway; let the collection mechanism deal with potential duplicates.
        tcltk_libs = [(os.path.basename(src_name), src_name, 'BINARY') for src_name in (self.tcl_lib, self.tk_lib)]
        self.binaries = bindepend.binary_dependency_analysis(tcltk_libs)

        # Put all shared library dependencies in `splash_requirements`, so they are made available in onefile mode.
        self.splash_requirements.update(entry[0] for entry in self.binaries)

        # If the user's program does not use tkinter, add resources from Tcl/Tk tree to the dependencies list.
        # Do so only for the resources that are part of splash requirements.
        if not self.uses_tkinter:
            self.binaries.extend(entry for entry in tcltk_tree if entry[0] in self.splash_requirements)

        # Check if all requirements were found.
        collected_files = set(entry[0] for entry in (binaries + datas + self.binaries))

        def _filter_requirement(filename):
            if filename not in collected_files:
                # Item is not bundled, so warn the user about it. This actually may happen on some tkinter installations
                # that are missing the license.terms file - as this file has no effect on operation of splash screen,
                # suppress the warning for it.
                if os.path.basename(filename) == 'license.terms':
                    return False

                logger.warning(
                    "The local Tcl/Tk installation is missing the file %s. The behavior of the splash screen is "
                    "therefore undefined and may be unsupported.", filename
                )
                return False
            return True

        # Remove all files which were not found.
        self.splash_requirements = set(filter(_filter_requirement, self.splash_requirements))

        logger.debug("Splash Requirements: %s", self.splash_requirements)

        # On AIX, the Tcl and Tk shared libraries might in fact be ar archives with shared object inside it, and need to
        # be `dlopen`'ed with full name (for example, `libtcl.a(libtcl.so.8.6)` and `libtk.a(libtk.so.8.6)`. So if the
        # library's suffix is .a, adjust the name accordingly, assuming fixed format for the shared object name.
        # Adjust the names at the end of this method, because preceding steps use `self.tcl_lib` and `self.tk_lib` for
        # filesystem-based operations and need the original filenames.
        if is_aix:
            _, ext = os.path.splitext(self.tcl_lib)
            if ext == '.a':
                tcl_major, tcl_minor = tcltk_info.tcl_version
                self.tcl_lib += f"(libtcl.so.{tcl_major}.{tcl_minor})"
            _, ext = os.path.splitext(self.tk_lib)
            if ext == '.a':
                tk_major, tk_minor = tcltk_info.tk_version
                self.tk_lib += f"(libtk.so.{tk_major}.{tk_minor})"

        self.__postinit__()

    _GUTS = (
        # input parameters
        ('image_file', _check_guts_eq),
        ('name', _check_guts_eq),
        ('script_name', _check_guts_eq),
        ('text_pos', _check_guts_eq),
        ('text_size', _check_guts_eq),
        ('text_font', _check_guts_eq),
        ('text_color', _check_guts_eq),
        ('text_default', _check_guts_eq),
        ('always_on_top', _check_guts_eq),
        ('full_tk', _check_guts_eq),
        ('minify_script', _check_guts_eq),
        ('max_img_size', _check_guts_eq),
        # calculated/analysed values
        ('uses_tkinter', _check_guts_eq),
        ('script', _check_guts_eq),
        ('tcl_lib', _check_guts_eq),
        ('tk_lib', _check_guts_eq),
        ('splash_requirements', _check_guts_eq),
        ('binaries', _check_guts_toc),
        # internal value
        # Check if the tkinter installation changed. This is theoretically possible if someone uses two different python
        # installations of the same version.
        ('_tkinter_file', _check_guts_eq),
    )

    def _check_guts(self, data, last_build):
        """
        Determine whether the splash resources need to be rebuilt.

        Returns True when the generic guts check fails or when the image file was modified after the last build.
        """
        if Target._check_guts(self, data, last_build):
            return True

        # Check if the image has been modified.
        if misc.mtime(self.image_file) > last_build:
            logger.info("Building %s because file %s changed", self.tocbasename, self.image_file)
            return True

        return False

    def _image_exceeds_max_size(self, img_size):
        """
        Return True if the given (width, height) tuple exceeds ``self.max_img_size`` in either dimension.

        NOTE(review): this replaces the previous direct tuple comparison (``img_size > self.max_img_size``), which
        compared tuples lexicographically and therefore missed images that are taller but not wider than the limit.
        It also honors the documented ``max_img_size=None`` setting (size limit disabled), which previously raised
        a TypeError when compared against a tuple.
        """
        if self.max_img_size is None:
            return False
        return img_size[0] > self.max_img_size[0] or img_size[1] > self.max_img_size[1]

    def assemble(self):
        """
        Build the splash resources file: load (and, if necessary, convert/resize) the image, then write the
        requirements, Tcl/Tk names, image, and generated script via SplashWriter.
        """
        logger.info("Building Splash %s", self.name)

        # Function to resize a given image to fit into the area defined by max_img_size.
        def _resize_image(_image, _orig_size):
            if PILImage:
                _w, _h = _orig_size
                _ratio_w = self.max_img_size[0] / _w
                if _ratio_w < 1:
                    # Image width exceeds limit
                    _h = int(_h * _ratio_w)
                    _w = self.max_img_size[0]

                _ratio_h = self.max_img_size[1] / _h
                if _ratio_h < 1:
                    # Image height exceeds limit
                    _w = int(_w * _ratio_h)
                    _h = self.max_img_size[1]

                # If a file is given it will be open
                if isinstance(_image, PILImage.Image):
                    _img = _image
                else:
                    _img = PILImage.open(_image)
                _img_resized = _img.resize((_w, _h))

                # Save image into a stream
                _image_stream = io.BytesIO()
                _img_resized.save(_image_stream, format='PNG')
                _img.close()
                _img_resized.close()
                _image_data = _image_stream.getvalue()
                logger.info("Resized image %s from dimensions %r to (%d, %d)", self.image_file, _orig_size, _w, _h)
                return _image_data
            else:
                raise ValueError(
                    "The splash image dimensions (w: %d, h: %d) exceed max_img_size (w: %d, h:%d), but the image "
                    "cannot be resized due to missing PIL.Image! Either install the Pillow package, adjust the "
                    "max_img_size, or use an image of compatible dimensions." %
                    (_orig_size[0], _orig_size[1], self.max_img_size[0], self.max_img_size[1])
                )

        # Open image file; use a context manager so the handle is also released on the raising branches below
        # (previously, the handle leaked whenever a ValueError was raised before the explicit close() call).
        with open(self.image_file, 'rb') as image_file:
            # Check header of the file to identify it
            if image_file.read(8) == b'\x89PNG\r\n\x1a\n':
                # self.image_file is a PNG file
                image_file.seek(16)
                img_size = (struct.unpack("!I", image_file.read(4))[0], struct.unpack("!I", image_file.read(4))[0])

                if self._image_exceeds_max_size(img_size):
                    # The image exceeds the maximum image size, so resize it
                    image = _resize_image(self.image_file, img_size)
                else:
                    image = os.path.abspath(self.image_file)
            elif PILImage:
                # Pillow is installed, meaning the image can be converted automatically
                img = PILImage.open(self.image_file, mode='r')

                if self._image_exceeds_max_size(img.size):
                    image = _resize_image(img, img.size)
                else:
                    image_data = io.BytesIO()
                    img.save(image_data, format='PNG')
                    img.close()
                    image = image_data.getvalue()
                    logger.info("Converted image %s to PNG format", self.image_file)
            else:
                raise ValueError(
                    "The image %s needs to be converted to a PNG file, but PIL.Image is not available! Either install "
                    "the Pillow package, or use a PNG image for your splash screen." % (self.image_file,)
                )

        SplashWriter(
            self.name,
            self.splash_requirements,
            os.path.basename(self.tcl_lib),  # tcl86t.dll
            os.path.basename(self.tk_lib),  # tk86t.dll
            tcltk_info.TK_ROOTNAME,
            image,
            self.script
        )

    @staticmethod
    def _check_tcl_tk_compatibility():
        """
        Verify that the available Tcl/Tk installation can support the splash screen; exit via SystemExit on
        feature-breaking problems, and log warnings for configurations that are merely untested.
        """
        tcl_version = tcltk_info.tcl_version  # (major, minor) tuple
        tk_version = tcltk_info.tk_version

        if is_darwin and tcltk_info.is_macos_system_framework:
            # Outdated Tcl/Tk 8.5 system framework is not supported.
            raise SystemExit(
                "ERROR: The splash screen feature does not support macOS system framework version of Tcl/Tk."
            )

        # Test if tcl/tk version is supported
        if tcl_version < (8, 6) or tk_version < (8, 6):
            logger.warning(
                "The installed Tcl/Tk (%d.%d / %d.%d) version might not work with the splash screen feature of the "
                "bootloader, which was tested against Tcl/Tk 8.6", *tcl_version, *tk_version
            )

        # This should be impossible, since tcl/tk is released together with the same version number, but just in case
        if tcl_version != tk_version:
            logger.warning(
                "The installed version of Tcl (%d.%d) and Tk (%d.%d) do not match. PyInstaller is tested against "
                "matching versions", *tcl_version, *tk_version
            )

        # Ensure that Tcl is built with multi-threading support.
        if not tcltk_info.tcl_threaded:
            # This is a feature breaking problem, so exit.
            raise SystemExit(
                "ERROR: The installed Tcl version is not threaded. PyInstaller only supports the splash screen "
                "using threaded Tcl."
            )

        # Ensure that Tcl and Tk shared libraries are available
        if tcltk_info.tcl_shared_library is None or tcltk_info.tk_shared_library is None:
            message = \
                "ERROR: Could not determine the path to Tcl and/or Tk shared library, " \
                "which are required for splash screen."
            if not tcltk_info.tkinter_extension_file:
                message += (
                    " The _tkinter module appears to be a built-in, which likely means that python was built with "
                    "statically-linked Tcl/Tk libraries and is incompatible with splash screen."
                )
            raise SystemExit(message)

    def generate_script(self):
        """
        Generate the script for the splash screen.

        If minify_script is True, all unnecessary parts will be removed.
        """
        d = {}
        if self.text_pos is not None:
            logger.debug("Add text support to splash screen")
            d.update({
                'pad_x': self.text_pos[0],
                'pad_y': self.text_pos[1],
                'color': self.text_color,
                'font': self.text_font,
                'font_size': self.text_size,
                'default_text': self.text_default,
            })
        script = splash_templates.build_script(text_options=d, always_on_top=self.always_on_top)

        if self.minify_script:
            # Remove any documentation, empty lines and unnecessary spaces
            script = '\n'.join(
                line for line in map(lambda line: line.strip(), script.splitlines())
                if not line.startswith('#')  # documentation
                and line  # empty lines
            )
            # Remove unnecessary spaces
            script = re.sub(' +', ' ', script)

        # Write script to disk, so that it is transparent to the user what script is executed.
        with open(self.script_name, "w", encoding="utf-8") as script_file:
            script_file.write(script)
        return script

    @staticmethod
    def _uses_tkinter(tkinter_file, binaries):
        """
        Return True if the given binaries TOC references the _tkinter extension module.

        Test for _tkinter extension instead of tkinter module, because user might use a different wrapping library
        for Tk. Use `pathlib.PurePath` in comparisons to account for case normalization and separator normalization.
        """
        tkinter_file = pathlib.PurePath(tkinter_file)
        for dest_name, src_name, typecode in binaries:
            if pathlib.PurePath(src_name) == tkinter_file:
                return True
        return False
|
||||
229
venv/lib/python3.12/site-packages/PyInstaller/building/splash_templates.py
Executable file
229
venv/lib/python3.12/site-packages/PyInstaller/building/splash_templates.py
Executable file
@@ -0,0 +1,229 @@
|
||||
# -----------------------------------------------------------------------------
|
||||
# Copyright (c) 2005-2023, PyInstaller Development Team.
|
||||
#
|
||||
# Distributed under the terms of the GNU General Public License (version 2
|
||||
# or later) with exception for distributing the bootloader.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#
|
||||
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
|
||||
# -----------------------------------------------------------------------------
|
||||
"""
|
||||
Templates for the splash screen tcl script.
|
||||
"""
|
||||
from PyInstaller.compat import is_cygwin, is_darwin, is_win
|
||||
|
||||
# Tcl script fragments. `build_script` concatenates a subset of these into the final splash-screen script; each
# fragment assumes the state (procs, variables, widgets) set up by the fragments that precede it, so order matters.

# TCP-based IPC server through which the bootloader updates the splash screen (e.g., extraction progress text).
ipc_script = r"""
proc _ipc_server {channel clientaddr clientport} {
    # This function is called if a new client connects to
    # the server. This creates a channel, which calls
    # _ipc_caller if data was send through the connection
    set client_name [format <%s:%d> $clientaddr $clientport]

    chan configure $channel \
        -buffering none \
        -encoding utf-8 \
        -eofchar \x04 \
        -translation cr
    chan event $channel readable [list _ipc_caller $channel $client_name]
}

proc _ipc_caller {channel client_name} {
    # This function is called if a command was sent through
    # the tcp connection. The current implementation supports
    # two commands: update_text and exit, although exit
    # is implemented to be called if the connection gets
    # closed (from python) or the character 0x04 was received
    chan gets $channel cmd

    if {[chan eof $channel]} {
        # This is entered if either the connection was closed
        # or the char 0x04 was send
        chan close $channel
        exit

    } elseif {![chan blocked $channel]} {
        # RPC methods

        # update_text command
        if {[string match "update_text*" $cmd]} {
            global status_text
            set first [expr {[string first "(" $cmd] + 1}]
            set last [expr {[string last ")" $cmd] - 1}]

            set status_text [string range $cmd $first $last]
        }
        # Implement other procedures here
    }
}

# By setting the port to 0 the os will assign a free port
set server_socket [socket -server _ipc_server -myaddr localhost 0]
set server_port [fconfigure $server_socket -sockname]

# This environment variable is shared between the python and the tcl
# interpreter and publishes the port the tcp server socket is available
set env(_PYI_SPLASH_IPC) [lindex $server_port 2]
"""

# Creates the splash image from `$_image_data` (injected by the bootloader) and defines the text-update callback.
image_script = r"""
# The variable $_image_data, which holds the data for the splash
# image is created by the bootloader.
image create photo splash_image
splash_image put $_image_data
# delete the variable, because the image now holds the data
unset _image_data

proc canvas_text_update {canvas tag _var - -} {
    # This function is rigged to be called if the a variable
    # status_text gets changed. This updates the text on
    # the canvas
    upvar $_var var
    $canvas itemconfigure $tag -text $var
}
"""

# Creates the window's frame and canvas, sized to the image and centered on the screen.
splash_canvas_setup = r"""
package require Tk

set image_width [image width splash_image]
set image_height [image height splash_image]
set display_width [winfo screenwidth .]
set display_height [winfo screenheight .]

set x_position [expr {int(0.5*($display_width - $image_width))}]
set y_position [expr {int(0.5*($display_height - $image_height))}]

# Toplevel frame in which all widgets should be positioned
frame .root

# Configure the canvas on which the splash
# screen will be drawn
canvas .root.canvas \
    -width $image_width \
    -height $image_height \
    -borderwidth 0 \
    -highlightthickness 0

# Draw the image into the canvas, filling it.
.root.canvas create image \
    [expr {$image_width / 2}] \
    [expr {$image_height / 2}] \
    -image splash_image
"""

# Optional text item on the canvas; %-formatted with the Splash text options (pad_x, pad_y, color, default_text).
splash_canvas_text = r"""
# Create a text on the canvas, which tracks the local
# variable status_text. status_text is changed via C to
# update the progress on the splash screen.
# We cannot use the default label, because it has a
# default background, which cannot be turned transparent
.root.canvas create text \
    %(pad_x)d \
    %(pad_y)d \
    -fill %(color)s \
    -justify center \
    -font myFont \
    -tag vartext \
    -anchor sw
trace variable status_text w \
    [list canvas_text_update .root.canvas vartext]
set status_text "%(default_text)s"
"""

# Font creation for the text item: system default font vs. a user-specified named font.
splash_canvas_default_font = r"""
font create myFont {*}[font actual TkDefaultFont]
font configure myFont -size %(font_size)d
"""

splash_canvas_custom_font = r"""
font create myFont -family %(font)s -size %(font_size)d
"""

# Platform-specific transparency setup for the splash window.
if is_win or is_cygwin:
    transparent_setup = r"""
# If the image is transparent, the background will be filled
# with magenta. The magenta background is later replaced with transparency.
# Here is the limitation of this implementation, that only
# sharp transparent image corners are possible
wm attributes . -transparentcolor magenta
.root.canvas configure -background magenta
"""

elif is_darwin:
    # This is untested, but should work following: https://stackoverflow.com/a/44296157/5869139
    transparent_setup = r"""
wm attributes . -transparent 1
. configure -background systemTransparent
.root.canvas configure -background systemTransparent
"""

else:
    # For Linux there is no common way to create a transparent window
    transparent_setup = r""

pack_widgets = r"""
# Position all widgets in the window
pack .root
grid .root.canvas -column 0 -row 0 -columnspan 1 -rowspan 2
"""

# Enable always-on-top behavior, by setting overrideredirect and the topmost attribute.
position_window_on_top = r"""
# Set position and mode of the window - always-on-top behavior
wm overrideredirect . 1
wm geometry . +${x_position}+${y_position}
wm attributes . -topmost 1
"""

# Disable always-on-top behavior
if is_win or is_cygwin or is_darwin:
    # On Windows, we disable the always-on-top behavior while still setting overrideredirect
    # (to disable window decorations), but set topmost attribute to 0.
    position_window = r"""
# Set position and mode of the window
wm overrideredirect . 1
wm geometry . +${x_position}+${y_position}
wm attributes . -topmost 0
"""
else:
    # On Linux, we must not use overrideredirect; instead, we set X11-specific type attribute to splash,
    # which lets the window manager to properly handle the splash screen (without window decorations
    # but allowing other windows to be brought to front).
    position_window = r"""
# Set position and mode of the window
wm geometry . +${x_position}+${y_position}
wm attributes . -type splash
"""

raise_window = r"""
raise .
"""
|
||||
|
||||
|
||||
def build_script(text_options=None, always_on_top=False):
    """
    Assemble the complete Tcl script for the splash screen from the individual fragment strings.

    :param text_options: Optional mapping with text attributes (must contain at least 'font'); when given,
        the text-related canvas fragments are formatted with it and included.
    :param always_on_top: Select the always-on-top window positioning fragment instead of the regular one.
    :return: The full Tcl script as a single string.
    """
    # Order matters: IPC setup, image, canvas, optional text, transparency, layout, positioning, raise.
    fragments = [ipc_script, image_script, splash_canvas_setup]

    if text_options:
        # The default font requires a different Tcl syntax than a named custom font.
        if text_options['font'] == "TkDefaultFont":
            fragments.append(splash_canvas_default_font % text_options)
        else:
            fragments.append(splash_canvas_custom_font % text_options)
        fragments.append(splash_canvas_text % text_options)

    fragments.append(transparent_setup)
    fragments.append(pack_widgets)
    fragments.append(position_window_on_top if always_on_top else position_window)
    fragments.append(raise_window)

    return '\n'.join(fragments)
|
||||
126
venv/lib/python3.12/site-packages/PyInstaller/building/templates.py
Executable file
126
venv/lib/python3.12/site-packages/PyInstaller/building/templates.py
Executable file
@@ -0,0 +1,126 @@
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (c) 2005-2023, PyInstaller Development Team.
|
||||
#
|
||||
# Distributed under the terms of the GNU General Public License (version 2
|
||||
# or later) with exception for distributing the bootloader.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#
|
||||
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
|
||||
#-----------------------------------------------------------------------------
|
||||
"""
|
||||
Templates to generate .spec files.
|
||||
"""
|
||||
|
||||
onefiletmplt = """# -*- mode: python ; coding: utf-8 -*-
|
||||
%(preamble)s
|
||||
|
||||
a = Analysis(
|
||||
%(scripts)s,
|
||||
pathex=%(pathex)s,
|
||||
binaries=%(binaries)s,
|
||||
datas=%(datas)s,
|
||||
hiddenimports=%(hiddenimports)s,
|
||||
hookspath=%(hookspath)r,
|
||||
hooksconfig={},
|
||||
runtime_hooks=%(runtime_hooks)r,
|
||||
excludes=%(excludes)s,
|
||||
noarchive=%(noarchive)s,
|
||||
optimize=%(optimize)r,
|
||||
)
|
||||
pyz = PYZ(a.pure)
|
||||
%(splash_init)s
|
||||
exe = EXE(
|
||||
pyz,
|
||||
a.scripts,
|
||||
a.binaries,
|
||||
a.datas,%(splash_target)s%(splash_binaries)s
|
||||
%(options)s,
|
||||
name='%(name)s',
|
||||
debug=%(debug_bootloader)s,
|
||||
bootloader_ignore_signals=%(bootloader_ignore_signals)s,
|
||||
strip=%(strip)s,
|
||||
upx=%(upx)s,
|
||||
upx_exclude=%(upx_exclude)s,
|
||||
runtime_tmpdir=%(runtime_tmpdir)r,
|
||||
console=%(console)s,
|
||||
disable_windowed_traceback=%(disable_windowed_traceback)s,
|
||||
argv_emulation=%(argv_emulation)r,
|
||||
target_arch=%(target_arch)r,
|
||||
codesign_identity=%(codesign_identity)r,
|
||||
entitlements_file=%(entitlements_file)r,%(exe_options)s
|
||||
)
|
||||
"""
|
||||
|
||||
onedirtmplt = """# -*- mode: python ; coding: utf-8 -*-
|
||||
%(preamble)s
|
||||
|
||||
a = Analysis(
|
||||
%(scripts)s,
|
||||
pathex=%(pathex)s,
|
||||
binaries=%(binaries)s,
|
||||
datas=%(datas)s,
|
||||
hiddenimports=%(hiddenimports)s,
|
||||
hookspath=%(hookspath)r,
|
||||
hooksconfig={},
|
||||
runtime_hooks=%(runtime_hooks)r,
|
||||
excludes=%(excludes)s,
|
||||
noarchive=%(noarchive)s,
|
||||
optimize=%(optimize)r,
|
||||
)
|
||||
pyz = PYZ(a.pure)
|
||||
%(splash_init)s
|
||||
exe = EXE(
|
||||
pyz,
|
||||
a.scripts,%(splash_target)s
|
||||
%(options)s,
|
||||
exclude_binaries=True,
|
||||
name='%(name)s',
|
||||
debug=%(debug_bootloader)s,
|
||||
bootloader_ignore_signals=%(bootloader_ignore_signals)s,
|
||||
strip=%(strip)s,
|
||||
upx=%(upx)s,
|
||||
console=%(console)s,
|
||||
disable_windowed_traceback=%(disable_windowed_traceback)s,
|
||||
argv_emulation=%(argv_emulation)r,
|
||||
target_arch=%(target_arch)r,
|
||||
codesign_identity=%(codesign_identity)r,
|
||||
entitlements_file=%(entitlements_file)r,%(exe_options)s
|
||||
)
|
||||
coll = COLLECT(
|
||||
exe,
|
||||
a.binaries,
|
||||
a.datas,%(splash_binaries)s
|
||||
strip=%(strip)s,
|
||||
upx=%(upx)s,
|
||||
upx_exclude=%(upx_exclude)s,
|
||||
name='%(name)s',
|
||||
)
|
||||
"""
|
||||
|
||||
bundleexetmplt = """app = BUNDLE(
|
||||
exe,
|
||||
name='%(name)s.app',
|
||||
icon=%(icon)s,
|
||||
bundle_identifier=%(bundle_identifier)s,
|
||||
)
|
||||
"""
|
||||
|
||||
bundletmplt = """app = BUNDLE(
|
||||
coll,
|
||||
name='%(name)s.app',
|
||||
icon=%(icon)s,
|
||||
bundle_identifier=%(bundle_identifier)s,
|
||||
)
|
||||
"""
|
||||
|
||||
splashtmpl = """splash = Splash(
|
||||
%(splash_image)r,
|
||||
binaries=a.binaries,
|
||||
datas=a.datas,
|
||||
text_pos=None,
|
||||
text_size=12,
|
||||
minify_script=True,
|
||||
always_on_top=True,
|
||||
)
|
||||
"""
|
||||
846
venv/lib/python3.12/site-packages/PyInstaller/building/utils.py
Executable file
846
venv/lib/python3.12/site-packages/PyInstaller/building/utils.py
Executable file
@@ -0,0 +1,846 @@
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright (c) 2005-2023, PyInstaller Development Team.
|
||||
#
|
||||
# Distributed under the terms of the GNU General Public License (version 2
|
||||
# or later) with exception for distributing the bootloader.
|
||||
#
|
||||
# The full license is in the file COPYING.txt, distributed with this software.
|
||||
#
|
||||
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
import fnmatch
|
||||
import glob
|
||||
import hashlib
|
||||
import io
|
||||
import marshal
|
||||
import os
|
||||
import pathlib
|
||||
import platform
|
||||
import shutil
|
||||
import struct
|
||||
import subprocess
|
||||
import sys
|
||||
import types
|
||||
import zipfile
|
||||
|
||||
from PyInstaller import compat
|
||||
from PyInstaller import log as logging
|
||||
from PyInstaller.compat import is_aix, is_darwin, is_win, is_linux
|
||||
from PyInstaller.exceptions import InvalidSrcDestTupleError
|
||||
from PyInstaller.utils import misc
|
||||
|
||||
if is_win:
|
||||
from PyInstaller.utils.win32 import versioninfo
|
||||
|
||||
if is_darwin:
|
||||
import PyInstaller.utils.osx as osxutils
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# -- Helpers for checking guts.
|
||||
#
|
||||
# NOTE: by _GUTS it is meant intermediate files and data structures that PyInstaller creates for bundling files and
|
||||
# creating final executable.
|
||||
|
||||
|
||||
def _check_guts_eq(attr_name, old_value, new_value, last_build):
|
||||
"""
|
||||
Rebuild is required if values differ.
|
||||
"""
|
||||
if old_value != new_value:
|
||||
logger.info("Building because %s changed", attr_name)
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _check_guts_toc_mtime(attr_name, old_toc, new_toc, last_build):
|
||||
"""
|
||||
Rebuild is required if mtimes of files listed in old TOC are newer than last_build.
|
||||
|
||||
Use this for calculated/analysed values read from cache.
|
||||
"""
|
||||
for dest_name, src_name, typecode in old_toc:
|
||||
if misc.mtime(src_name) > last_build:
|
||||
logger.info("Building because %s changed", src_name)
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _check_guts_toc(attr_name, old_toc, new_toc, last_build):
    """
    Guts check: a rebuild is required if either the TOC content changed, or any file listed in the
    old TOC is newer than last_build.

    Use this for input parameters.
    """
    if _check_guts_eq(attr_name, old_toc, new_toc, last_build):
        return True
    return _check_guts_toc_mtime(attr_name, old_toc, new_toc, last_build)
|
||||
|
||||
|
||||
def destination_name_for_extension(module_name, src_name, typecode):
    """
    Determine the full in-bundle destination path for an extension module, given the components of its
    TOC entry (dest_name, src_name, typecode).
    """
    assert typecode == 'EXTENSION'

    src_path = pathlib.Path(src_name)

    # The importable module name (e.g., `psutil._psutil_linux`) encodes the parent package directories.
    # The final element is replaced wholesale by the source file's basename, because only the source
    # name carries the complete extension suffix.
    name_parts = module_name.split('.')
    name_parts[-1] = src_path.name

    # Extensions that originate from python's python3.x/lib-dynload directory are diverted into a
    # matching python3.x/lib-dynload destination directory, instead of the application's top-level
    # directory. See #5604 for the original motivation and #9204 for the extension of the rule.
    if src_path.parent.name == 'lib-dynload':
        python_dir = f'python{sys.version_info.major}.{sys.version_info.minor}'
        if src_path.parent.parent.name == python_dir:
            name_parts = [python_dir, 'lib-dynload'] + name_parts

    return os.path.join(*name_parts)
|
||||
|
||||
|
||||
def process_collected_binary(
    src_name,
    dest_name,
    use_strip=False,
    use_upx=False,
    upx_exclude=None,
    target_arch=None,
    codesign_identity=None,
    entitlements_file=None,
    strict_arch_validation=False
):
    """
    Process the collected binary using strip or UPX (or both), and apply any platform-specific processing. On macOS,
    this rewrites the library paths in the headers, and (re-)signs the binary. On-disk cache is used to avoid
    processing the same binary with same options over and over.

    In addition to given arguments, this function also uses CONF['cachedir'] and CONF['upx_dir'].

    Returns the path to the file that should be collected: either the (possibly processed) cached copy, or the
    original `src_name` when no processing is required.
    """
    from PyInstaller.config import CONF

    # We need to use cache in the following scenarios:
    # * extra binary processing due to use of `strip` or `upx`
    # * building on macOS, where we need to rewrite library paths in binaries' headers and (re-)sign the binaries.
    if not use_strip and not use_upx and not is_darwin:
        return src_name

    # Match against provided UPX exclude patterns.
    upx_exclude = upx_exclude or []
    if use_upx:
        src_path = pathlib.PurePath(src_name)
        for upx_exclude_entry in upx_exclude:
            # pathlib.PurePath.match() matches from right to left, and supports * wildcard, but does not support the
            # "**" syntax for directory recursion. Case sensitivity follows the OS default.
            if src_path.match(upx_exclude_entry):
                logger.info("Disabling UPX for %s due to match in exclude pattern: %s", src_name, upx_exclude_entry)
                use_upx = False
                break

    # Additional automatic disablement rules for UPX and strip.

    # On Windows, avoid using UPX with binaries that have control flow guard (CFG) enabled.
    if use_upx and is_win and versioninfo.pefile_check_control_flow_guard(src_name):
        logger.info('Disabling UPX for %s due to CFG!', src_name)
        use_upx = False

    # Avoid using UPX with Qt plugins, as it strips the data required by the Qt plugin loader.
    if use_upx and misc.is_file_qt_plugin(src_name):
        logger.info('Disabling UPX for %s due to it being a Qt plugin!', src_name)
        use_upx = False

    # On linux, if a binary has an accompanying HMAC or CHK file, avoid modifying it in any way.
    if (use_upx or use_strip) and is_linux:
        src_path = pathlib.Path(src_name)
        hmac_path = src_path.with_name(f".{src_path.name}.hmac")
        chk_path = src_path.with_suffix(".chk")
        if hmac_path.is_file():
            logger.info('Disabling UPX and/or strip for %s due to accompanying .hmac file!', src_name)
            use_upx = use_strip = False
        elif chk_path.is_file():
            logger.info('Disabling UPX and/or strip for %s due to accompanying .chk file!', src_name)
            use_upx = use_strip = False
        del src_path, hmac_path, chk_path

    # Exit early if no processing is required after above rules are applied.
    if not use_strip and not use_upx and not is_darwin:
        return src_name

    # Prepare cache directory path. Cache is tied to python major/minor version, but also to various processing
    # options.
    pyver = f'py{sys.version_info[0]}{sys.version_info[1]}'
    arch = platform.architecture()[0]
    cache_dir = os.path.join(
        CONF['cachedir'],
        f'bincache{use_strip:d}{use_upx:d}{pyver}{arch}',
    )
    if target_arch:
        cache_dir = os.path.join(cache_dir, target_arch)
    if is_darwin:
        # Separate by codesign identity
        if codesign_identity:
            # Compute hex digest of codesign identity string to prevent issues with invalid characters.
            csi_hash = hashlib.sha256(codesign_identity.encode('utf-8'))
            cache_dir = os.path.join(cache_dir, csi_hash.hexdigest())
        else:
            cache_dir = os.path.join(cache_dir, 'adhoc')  # ad-hoc signing
        # Separate by entitlements
        if entitlements_file:
            # Compute hex digest of entitlements file contents
            with open(entitlements_file, 'rb') as fp:
                ef_hash = hashlib.sha256(fp.read())
            cache_dir = os.path.join(cache_dir, ef_hash.hexdigest())
        else:
            cache_dir = os.path.join(cache_dir, 'no-entitlements')
    os.makedirs(cache_dir, exist_ok=True)

    # Load cache index, if available
    cache_index_file = os.path.join(cache_dir, "index.dat")
    try:
        cache_index = misc.load_py_data_struct(cache_index_file)
    except FileNotFoundError:
        cache_index = {}
    except Exception:
        # Tell the user they may want to fix their cache... However, do not delete it for them; if it keeps getting
        # corrupted, we will never find out.
        logger.warning("PyInstaller bincache may be corrupted; use pyinstaller --clean to fix it.")
        raise

    # Look up the file in cache; use case-normalized destination name as identifier.
    cached_id = os.path.normcase(dest_name)
    cached_name = os.path.join(cache_dir, dest_name)
    src_digest = _compute_file_digest(src_name)

    if cached_id in cache_index:
        # If digest matches to the cached digest, return the cached file...
        if src_digest == cache_index[cached_id]:
            return cached_name

        # ... otherwise remove it.
        os.remove(cached_name)

    # Ensure parent path exists
    os.makedirs(os.path.dirname(cached_name), exist_ok=True)

    # Use `shutil.copyfile` to copy the file with default permissions bits, then manually set executable
    # bits. This way, we avoid copying permission bits and metadata from the original file, which might be too
    # restrictive for further processing (read-only permissions, immutable flag on FreeBSD, and so on).
    shutil.copyfile(src_name, cached_name)
    os.chmod(cached_name, 0o755)

    # Apply strip
    if use_strip:
        strip_options = []
        if is_darwin:
            # The default strip behavior breaks some shared libraries under macOS.
            strip_options = ["-S"]  # -S = strip only debug symbols.
        elif is_aix:
            # Set -X32_64 flag to have strip transparently process both 32-bit and 64-bit binaries, without user having
            # to set OBJECT_MODE environment variable prior to the build. Also accommodates potential mixed-case
            # scenario, for example a 32-bit utility program being collected into a 64-bit application bundle.
            strip_options = ["-X32_64"]

        cmd = ["strip", *strip_options, cached_name]
        logger.info("Executing: %s", " ".join(cmd))
        try:
            p = subprocess.run(
                cmd,
                stdin=subprocess.DEVNULL,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                check=True,
                errors='ignore',
                encoding='utf-8',
            )
            logger.debug("Output from strip command:\n%s", p.stdout)
        except subprocess.CalledProcessError as e:
            show_warning = True

            # On AIX, strip utility raises an error when ran against already-stripped binary. Catch the corresponding
            # message (`0654-419 The specified archive file was already stripped.`) and suppress the warning.
            if is_aix and "0654-419" in e.stdout:
                show_warning = False

            if show_warning:
                logger.warning("Failed to run strip on %r!", cached_name, exc_info=True)
                logger.warning("Output from strip command:\n%s", e.stdout)
        except Exception:
            logger.warning("Failed to run strip on %r!", cached_name, exc_info=True)

    # Apply UPX
    if use_upx:
        upx_exe = 'upx'
        upx_dir = CONF['upx_dir']
        if upx_dir:
            upx_exe = os.path.join(upx_dir, upx_exe)

        upx_options = [
            # Do not compress icons, so that they can still be accessed externally.
            '--compress-icons=0',
            # Use LZMA compression.
            '--lzma',
            # Quiet mode.
            '-q',
        ]
        if is_win:
            # Binaries built with Visual Studio 7.1 require --strip-loadconf or they will not compress.
            upx_options.append('--strip-loadconf')

        cmd = [upx_exe, *upx_options, cached_name]
        logger.info("Executing: %s", " ".join(cmd))
        try:
            p = subprocess.run(
                cmd,
                stdin=subprocess.DEVNULL,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                check=True,
                errors='ignore',
                encoding='utf-8',
            )
            logger.debug("Output from upx command:\n%s", p.stdout)
        except subprocess.CalledProcessError as e:
            # FIX: the warning message used to read "Failed to upx strip on %r!", which was a garbled mix of the
            # strip and upx messages; it now consistently matches the other "Failed to run <tool> on ..." warnings.
            logger.warning("Failed to run upx on %r!", cached_name, exc_info=True)
            logger.warning("Output from upx command:\n%s", e.stdout)
        except Exception:
            logger.warning("Failed to run upx on %r!", cached_name, exc_info=True)

    # On macOS, we need to modify the given binary's paths to the dependent libraries, in order to ensure they are
    # relocatable and always refer to location within the frozen application. Specifically, we make all dependent
    # library paths relative to @rpath, and set @rpath to point to the top-level application directory, relative to
    # the binary's location (i.e., @loader_path).
    #
    # While modifying the headers invalidates existing signatures, we avoid removing them in order to speed things up
    # (and to avoid potential bugs in the codesign utility, like the one reported on macOS 10.13 in #6167).
    # The forced re-signing at the end should take care of the invalidated signatures.
    if is_darwin:
        try:
            osxutils.binary_to_target_arch(cached_name, target_arch, display_name=src_name)
            #osxutils.remove_signature_from_binary(cached_name)  # Disabled as per comment above.
            target_rpath = str(
                pathlib.PurePath('@loader_path', *['..' for level in pathlib.PurePath(dest_name).parent.parts])
            )
            osxutils.set_dylib_dependency_paths(cached_name, target_rpath)
            osxutils.sign_binary(cached_name, codesign_identity, entitlements_file)
        except osxutils.InvalidBinaryError:
            # Raised by osxutils.binary_to_target_arch when the given file is not a valid macOS binary (for example,
            # a linux .so file; see issue #6327). The error prevents any further processing, so just ignore it.
            pass
        except osxutils.IncompatibleBinaryArchError:
            # Raised by osxutils.binary_to_target_arch when the given file does not contain (all) required arch slices.
            # Depending on the strict validation mode, re-raise or swallow the error.
            #
            # Strict validation should be enabled only for binaries where the architecture *must* match the target one,
            # i.e., the extension modules. Everything else is pretty much a gray area, for example:
            # * a universal2 extension may have its x86_64 and arm64 slices linked against distinct single-arch/thin
            #   shared libraries
            # * a collected executable that is launched by python code via a subprocess can be x86_64-only, even though
            #   the actual python code is running on M1 in native arm64 mode.
            if strict_arch_validation:
                raise
            logger.debug("File %s failed optional architecture validation - collecting as-is!", src_name)
        except Exception as e:
            raise SystemError(f"Failed to process binary {cached_name!r}!") from e

    # Update cache index
    cache_index[cached_id] = src_digest
    misc.save_py_data_struct(cache_index_file, cache_index)

    return cached_name
|
||||
|
||||
|
||||
def _compute_file_digest(filename):
|
||||
hasher = hashlib.sha1()
|
||||
with open(filename, "rb") as fp:
|
||||
for chunk in iter(lambda: fp.read(16 * 1024), b""):
|
||||
hasher.update(chunk)
|
||||
return bytearray(hasher.digest())
|
||||
|
||||
|
||||
def _check_path_overlap(path):
    """
    Check that the given output path does not contain WORKPATH or SPECPATH (i.e., neither WORKPATH nor SPECPATH
    may start with `path`) - such overlap would typically be caused by a faulty, hand-edited spec file.

    Raises SystemExit when an overlap is found; returns True otherwise.
    """
    from PyInstaller.config import CONF

    error_count = 0
    if CONF['workpath'].startswith(path):
        logger.error('Specfile error: The output path "%s" contains WORKPATH (%s)', path, CONF['workpath'])
        error_count += 1
    if CONF['specpath'].startswith(path):
        logger.error('Specfile error: The output path "%s" contains SPECPATH (%s)', path, CONF['specpath'])
        error_count += 1

    if error_count:
        raise SystemExit(
            'ERROR: Please edit/recreate the specfile (%s) and set a different output name (e.g. "dist").' %
            CONF['spec']
        )
    return True
|
||||
|
||||
|
||||
def _make_clean_directory(path):
    """
    (Re)create an empty directory at the given path, after verifying that it does not overlap with
    WORKPATH/SPECPATH. An existing file or directory at the path is removed first (directory removal may
    prompt the user via _rmtree).
    """
    if _check_path_overlap(path):
        # Remove whatever currently occupies the path: try a plain file removal first, and fall back to
        # the (confirmation-guarded) tree removal when that fails.
        if os.path.isfile(path) or os.path.isdir(path):
            try:
                os.remove(path)
            except OSError:
                _rmtree(path)

        os.makedirs(path, exist_ok=True)
|
||||
|
||||
|
||||
def _rmtree(path):
    """
    Remove the given directory and all its contents, but only after the user confirms interactively, or
    when the -y (noconfirm) option is active. Raises SystemExit when confirmation is impossible or denied.
    """
    from PyInstaller.config import CONF

    if CONF['noconfirm']:
        choice = 'y'
    elif sys.stdout.isatty():
        choice = input(
            'WARNING: The output directory "%s" and ALL ITS CONTENTS will be REMOVED! Continue? (y/N)' % path
        )
    else:
        # Not interactive and no -y option: we cannot ask, so refuse to remove anything.
        raise SystemExit(
            'ERROR: The output directory "%s" is not empty. Please remove all its contents or use the -y option (remove'
            ' output directory without confirmation).' % path
        )

    if choice.strip().lower() != 'y':
        raise SystemExit('User aborted')

    if not CONF['noconfirm']:
        print("On your own risk, you can use the option `--noconfirm` to get rid of this question.")
    logger.info('Removing dir %s', path)
    shutil.rmtree(path)
|
||||
|
||||
|
||||
# TODO Refactor to prohibit empty target directories. As the docstring below documents, this function currently permits
|
||||
# the second item of each 2-tuple in "hook.datas" to be the empty string, in which case the target directory defaults to
|
||||
# the source directory's basename. However, this functionality is very fragile and hence bad. Instead:
|
||||
#
|
||||
# * An exception should be raised if such item is empty.
|
||||
# * All hooks currently passing the empty string for such item (e.g.,
|
||||
# "hooks/hook-babel.py", "hooks/hook-matplotlib.py") should be refactored
|
||||
# to instead pass such basename.
|
||||
def format_binaries_and_datas(binaries_or_datas, workingdir=None):
    """
    Convert the passed list of hook-style 2-tuples into a returned set of `TOC`-style 2-tuples.

    Elements of the passed list are 2-tuples `(source_dir_or_glob, target_dir)`.
    Elements of the returned set are 2-tuples `(target_file, source_file)`.
    For backwards compatibility, the order of elements in the former tuples are the reverse of the order of elements in
    the latter tuples!

    Parameters
    ----------
    binaries_or_datas : list
        List of hook-style 2-tuples (e.g., the top-level `binaries` and `datas` attributes defined by hooks) whose:
        * The first element is either:
          * A glob matching only the absolute or relative paths of source non-Python data files.
          * The absolute or relative path of a source directory containing only source non-Python data files.
        * The second element is the relative path of the target directory into which these source files will be
          recursively copied.

        If the optional `workingdir` parameter is passed, source paths may be either absolute or relative; else, source
        paths _must_ be absolute.
    workingdir : str
        Optional absolute path of the directory to which all relative source paths in the `binaries_or_datas`
        parameter will be prepended by (and hence converted into absolute paths) _or_ `None` if these paths are to be
        preserved as relative. Defaults to `None`.

    Returns
    ----------
    set
        Set of `TOC`-style 2-tuples whose:
        * First element is the absolute or relative path of a target file.
        * Second element is the absolute or relative path of the corresponding source file to be copied to this target
          file.
    """
    toc_datas = set()

    for src_root_path_or_glob, trg_root_dir in binaries_or_datas:
        # Disallow empty source path. Those are typically result of errors, and result in implicit collection of the
        # whole current working directory, which is never a good idea.
        if not src_root_path_or_glob:
            raise InvalidSrcDestTupleError(
                (src_root_path_or_glob, trg_root_dir),
                "Empty SRC is not allowed when adding binary and data files, as it would result in collection of the "
                "whole current working directory."
            )
        if not trg_root_dir:
            raise InvalidSrcDestTupleError(
                (src_root_path_or_glob, trg_root_dir),
                "Empty DEST_DIR is not allowed - to collect files into application's top-level directory, use "
                f"{os.curdir!r}."
            )
        # Disallow absolute target paths, as well as target paths that would end up pointing outside of the
        # application's top-level directory.
        if os.path.isabs(trg_root_dir):
            raise InvalidSrcDestTupleError((src_root_path_or_glob, trg_root_dir), "DEST_DIR must be a relative path!")
        if os.path.normpath(trg_root_dir).startswith('..'):
            raise InvalidSrcDestTupleError(
                (src_root_path_or_glob, trg_root_dir),
                "DEST_DIR must not point outside of application's top-level directory!",
            )

        # Convert relative to absolute paths if required.
        if workingdir and not os.path.isabs(src_root_path_or_glob):
            src_root_path_or_glob = os.path.join(workingdir, src_root_path_or_glob)

        # Normalize paths.
        src_root_path_or_glob = os.path.normpath(src_root_path_or_glob)

        # If given source path is a file or directory path, pass it on.
        # If not, treat it as a glob and pass on all matching paths. However, we need to preserve the directories
        # captured by the glob - as opposed to collecting their contents into top-level target directory. Therefore,
        # we set a flag which is used in subsequent processing to distinguish between original directory paths and
        # directory paths that were captured by the glob.
        if os.path.isfile(src_root_path_or_glob) or os.path.isdir(src_root_path_or_glob):
            src_root_paths = [src_root_path_or_glob]
            was_glob = False
        else:
            src_root_paths = glob.glob(src_root_path_or_glob)
            was_glob = True

        if not src_root_paths:
            raise SystemExit(f'ERROR: Unable to find {src_root_path_or_glob!r} when adding binary and data files.')

        for src_root_path in src_root_paths:
            if os.path.isfile(src_root_path):
                # Normalizing the result to remove redundant relative paths (e.g., removing "./" from "trg/./file").
                toc_datas.add((
                    os.path.normpath(os.path.join(trg_root_dir, os.path.basename(src_root_path))),
                    os.path.normpath(src_root_path),
                ))
            elif os.path.isdir(src_root_path):
                # Recursively collect files; directories themselves yield no entries of their own - only the
                # files found inside them do.
                for src_dir, src_subdir_basenames, src_file_basenames in os.walk(src_root_path):
                    # Ensure the current source directory is a subdirectory of the passed top-level source directory.
                    # Since os.walk() does *NOT* follow symlinks by default, this should be the case. (But let's make
                    # sure.)
                    assert src_dir.startswith(src_root_path)

                    # Relative path of the current target directory, obtained by:
                    #
                    # * Stripping the top-level source directory from the current source directory (e.g., removing
                    #   "/top" from "/top/dir").
                    # * Normalizing the result to remove redundant relative paths (e.g., removing "./" from
                    #   "trg/./file").
                    if was_glob:
                        # Preserve directories captured by glob.
                        rel_dir = os.path.relpath(src_dir, os.path.dirname(src_root_path))
                    else:
                        rel_dir = os.path.relpath(src_dir, src_root_path)
                    trg_dir = os.path.normpath(os.path.join(trg_root_dir, rel_dir))

                    for src_file_basename in src_file_basenames:
                        src_file = os.path.join(src_dir, src_file_basename)
                        if os.path.isfile(src_file):
                            # Normalize the result to remove redundant relative paths (e.g., removing "./" from
                            # "trg/./file").
                            toc_datas.add((
                                os.path.normpath(os.path.join(trg_dir, src_file_basename)), os.path.normpath(src_file)
                            ))

    return toc_datas
|
||||
|
||||
|
||||
def get_code_object(modname, filename, optimize):
    """
    Get the code-object for a module.

    This is a simplified, non-performant version which circumvents __pycache__: a .pyc file is read directly,
    while anything else is treated as python source and compiled with the given optimization level.
    """

    # Once upon a time, we compiled dummy code objects for PEP-420 namespace packages. We do not do that anymore.
    assert filename not in {'-', None}, "Called with PEP-420 namespace package!"

    _, ext = os.path.splitext(filename)
    ext = ext.lower()

    if ext == '.pyc':
        # The module is available in binary-only form. Read the contents of .pyc file using helper function, which
        # supports reading from either stand-alone or archive-embedded .pyc files.
        logger.debug('Reading code object from .pyc file %s', filename)
        pyc_data = _read_pyc_data(filename)
        # Skip the 16-byte .pyc header (magic, flags, timestamp/hash, size) before unmarshalling.
        code_object = marshal.loads(pyc_data[16:])
    else:
        # Assume this is a source .py file, but allow an arbitrary extension (other than .pyc, which is taken in
        # the above branch). This allows entry-point scripts to have an arbitrary (or no) extension, as tested by
        # the `test_arbitrary_ext` in `test_basic.py`.
        logger.debug('Compiling python script/module file %s', filename)

        with open(filename, 'rb') as f:
            source = f.read()

        # If entry-point script has no suffix, append .py when compiling the source. In POSIX builds, the executable
        # has no suffix either; this causes issues with `traceback` module, as it tries to read the executable file
        # when trying to look up the code for the entry-point script (when current working directory contains the
        # executable).
        # NOTE: `ext` computed above is reused here - lowercasing cannot change whether the extension is empty, so
        # the previous redundant second `os.path.splitext()` call was removed.
        if not ext:
            logger.debug("Appending .py to compiled entry-point name...")
            filename += '.py'

        try:
            code_object = compile(source, filename, 'exec', optimize=optimize)
        except SyntaxError:
            # FIX: warning message previously read "Sytnax error while compiling %s".
            logger.warning("Syntax error while compiling %s", filename)
            raise

    return code_object
|
||||
|
||||
|
||||
def replace_filename_in_code_object(code_object, filename):
    """
    Recursively rewrite `co_filename` of the given code object, as well as of any nested code objects found among
    its `co_consts` entries. Primarily used to anonymize collected code objects, i.e., by removing the build
    environment's paths from them.
    """
    new_consts = []
    for entry in code_object.co_consts:
        if isinstance(entry, types.CodeType):
            # Recurse into nested code objects (functions, classes, comprehensions, ...).
            entry = replace_filename_in_code_object(entry, filename)
        new_consts.append(entry)

    return code_object.replace(co_consts=tuple(new_consts), co_filename=filename)
|
||||
|
||||
|
||||
def _should_include_system_binary(binary_tuple, exceptions):
|
||||
"""
|
||||
Return True if the given binary_tuple describes a system binary that should be included.
|
||||
|
||||
Exclude all system library binaries other than those with "lib-dynload" in the destination or "python" in the
|
||||
source, except for those matching the patterns in the exceptions list. Intended to be used from the Analysis
|
||||
exclude_system_libraries method.
|
||||
"""
|
||||
dest = binary_tuple[0]
|
||||
if dest.startswith(f'python{sys.version_info.major}.{sys.version_info.minor}/lib-dynload'):
|
||||
return True
|
||||
src = binary_tuple[1]
|
||||
if fnmatch.fnmatch(src, '*python*'):
|
||||
return True
|
||||
if not src.startswith('/lib') and not src.startswith('/usr/lib'):
|
||||
return True
|
||||
for exception in exceptions:
|
||||
if fnmatch.fnmatch(dest, exception):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def compile_pymodule(name, src_path, workpath, optimize, code_cache=None):
    """
    Given the name and source file for a pure-python module, compile the module in the specified working directory,
    and return the name of resulting .pyc file. The paths in the resulting .pyc module are anonymized by having their
    absolute prefix removed.

    If a .pyc file with matching name already exists in the target working directory, it is re-used (provided it has
    compatible bytecode magic in the header, and that its modification time is newer than that of the source file).

    If the specified module is available in binary-only form, the input .pyc file is copied to the target working
    directory and post-processed. If the specified module is available in source form, it is compiled only if
    corresponding code object is not available in the optional code-object cache; otherwise, it is copied from cache
    and post-processed. When compiling the module, the specified byte-code optimization level is used.

    It is up to caller to ensure that the optional code-object cache contains only code-objects of target optimization
    level, and that if the specified working directory already contains .pyc files, that they were created with target
    optimization level.
    """
    # Map the fully-qualified module name to a relative location inside the workpath.
    name_parts = name.split(".")
    if "__init__" in src_path:
        # Package's __init__ module: every component of the fully-qualified name becomes a directory, and the
        # module file itself is named "__init__".
        parent_dirs, mod_basename = name_parts, "__init__"
    else:
        # Regular module: last name component is the module file, the preceding ones form the parent directories.
        parent_dirs, mod_basename = name_parts[:-1], name_parts[-1]
    pyc_path = os.path.join(workpath, *parent_dirs, mod_basename + '.pyc')

    # Try the optional code-object cache first.
    code_object = code_cache.get(name, None) if code_cache else None

    if code_object is None:
        ext = os.path.splitext(src_path)[1].lower()
        if ext == '.py':
            # Source module; compile it at the requested byte-code optimization level.
            with open(src_path, 'rb') as src_file:
                code_object = compile(src_file.read(), src_path, 'exec', optimize=optimize)
        elif ext == '.pyc':
            # Binary-only module. The helper supports reading from either stand-alone or archive-embedded .pyc
            # files. Unmarshal the code object, so that paths can be stripped from it below.
            code_object = marshal.loads(_read_pyc_data(src_path)[16:])
        else:
            raise ValueError(f"Invalid python module file {src_path}; unhandled extension {ext}!")

    # Anonymize co_filename: replace the full build-machine path with a relative filename derived from the module
    # name, mirroring the `pyc_path` construction above.
    anonymized_filename = os.path.join(*parent_dirs, mod_basename + '.py')
    code_object = replace_filename_in_code_object(code_object, anonymized_filename)

    # Serialize the complete .pyc image into memory first, so it can be compared against an already-existing
    # on-disk file. Leaving an up-to-date file untouched avoids unnecessary cache invalidation for targets that
    # consume the .pyc file (e.g., PKG, COLLECT).
    pyc_stream = io.BytesIO()
    pyc_stream.write(compat.BYTECODE_MAGIC)
    pyc_stream.write(struct.pack('<I', 0b01))  # PEP-552: hash-based pyc, check_source=False
    pyc_stream.write(b'\00' * 8)  # Zero the source hash
    marshal.dump(code_object, pyc_stream)
    pyc_data = pyc_stream.getvalue()

    if os.path.isfile(pyc_path):
        with open(pyc_path, 'rb') as existing_file:
            if existing_file.read() == pyc_data:
                return pyc_path  # Existing file is identical; keep it.

    # (Re)write the file, creating parent directories as necessary.
    os.makedirs(os.path.dirname(pyc_path), exist_ok=True)
    with open(pyc_path, 'wb') as out_file:
        out_file.write(pyc_data)

    return pyc_path
|
||||
|
||||
|
||||
def _read_pyc_data(filename):
    """
    Helper for reading data from .pyc files. Supports both stand-alone and archive-embedded .pyc files. Used by
    `compile_pymodule` and `get_code_object` helper functions.

    Raises FileNotFoundError if the file exists neither stand-alone nor inside a containing zip archive, and
    ValueError if the bytecode magic in the .pyc header does not match that of the running python version.
    """
    src_file = pathlib.Path(filename)

    if src_file.is_file():
        # Stand-alone .pyc file.
        pyc_data = src_file.read_bytes()
    else:
        # Check if .pyc file is stored in a .zip archive, as is the case for stdlib modules in embeddable
        # python on Windows.
        parent_zip_file = misc.path_to_parent_archive(src_file)
        if parent_zip_file is not None and zipfile.is_zipfile(parent_zip_file):
            with zipfile.ZipFile(parent_zip_file, 'r') as zip_archive:
                # NOTE: zip entry names must be in POSIX format, even on Windows!
                zip_entry_name = str(src_file.relative_to(parent_zip_file).as_posix())
                pyc_data = zip_archive.read(zip_entry_name)
        else:
            raise FileNotFoundError(f"Cannot find .pyc file {filename!r}!")

    # Verify the python version via the bytecode magic in the .pyc header. Include the offending filename in the
    # message (the original f-string contained a literal "(unknown)" where the placeholder belongs).
    if pyc_data[:4] != compat.BYTECODE_MAGIC:
        raise ValueError(f"The .pyc module {filename} was compiled for incompatible version of python!")

    return pyc_data
|
||||
|
||||
|
||||
def postprocess_binaries_toc_pywin32(binaries):
    """
    Process the given `binaries` TOC list to apply work around for `pywin32` package, fixing the target directory
    for collected extensions.

    Files collected from the `win32`, `pythonwin`, or `pywin32_system32` source sub-directories into the top-level
    application directory are re-targeted into a sub-directory named after their source directory. They end up in
    the top-level directory because `pywin32.pth` adds those directories to `sys.path`, making them look like
    top-level locations; however, the extensions might be linked against each other, so the directory layout should
    be preserved for consistency between modulegraph-discovered extensions and binaries discovered by link-time
    dependency analysis.
    """
    PYWIN32_SUBDIRS = {'win32', 'pythonwin', 'pywin32_system32'}

    result = []
    for dest_name, src_name, typecode in binaries:
        src_parent = pathlib.PurePath(src_name).parent.name
        is_top_level = pathlib.PurePath(dest_name).parent == pathlib.PurePath('.')
        if is_top_level and src_parent.lower() in PYWIN32_SUBDIRS:
            # Re-target into a sub-directory matching the (original-case) source directory name.
            dest_name = str(pathlib.PurePath(src_parent) / dest_name)
        result.append((dest_name, src_name, typecode))

    return result
|
||||
|
||||
|
||||
def postprocess_binaries_toc_pywin32_anaconda(binaries):
    """
    Process the given `binaries` TOC list to apply work around for Anaconda `pywin32` package, fixing the location
    of collected `pywintypes3X.dll` and `pythoncom3X.dll`.

    The Anaconda-provided `pywin32` package installs three copies of these DLLs, located in the following
    directories (relative to the environment): `Library/bin`, `Lib/site-packages/pywin32_system32`, and
    `Lib/site-packages/win32`. This turns the dependency scanner and directory layout preservation mechanism into a
    lottery based on which `pywin32` modules are imported and in what order. To keep things simple, divert the
    offending copies into `pywin32_system32` and let the final TOC list normalization deal with duplicates.
    """
    # DLL base names to divert, for the running python version.
    dll_candidates = {
        f"pywintypes{sys.version_info[0]}{sys.version_info[1]}.dll",
        f"pythoncom{sys.version_info[0]}{sys.version_info[1]}.dll",
    }
    # Destination parent directories in which stray copies may appear.
    duplicate_dirs = {
        pathlib.PurePath('.'),
        pathlib.PurePath('win32'),
    }

    def _divert(dest_name):
        # Divert based on the destination base name and destination parent directory.
        dest_path = pathlib.PurePath(dest_name)
        if dest_path.name.lower() in dll_candidates and dest_path.parent in duplicate_dirs:
            return str(pathlib.PurePath("pywin32_system32") / dest_path.name)
        return dest_name

    return [(_divert(dest_name), src_name, typecode) for dest_name, src_name, typecode in binaries]
|
||||
|
||||
|
||||
def create_base_library_zip(filename, modules_toc, code_cache=None):
    """
    Create a zip archive with python modules that are needed during python interpreter initialization.
    """
    # Map TOC typecode to the byte-code optimization level used when compiling.
    OPTIM_LEVELS = {'PYMODULE': 0, 'PYMODULE-1': 1, 'PYMODULE-2': 2}

    with zipfile.ZipFile(filename, 'w') as archive:
        for name, src_path, typecode in modules_toc:
            # Obtain code object from the optional cache, or compile it at the requested optimization level.
            code = code_cache.get(name, None) if code_cache is not None else None
            if code is None:
                code = get_code_object(name, src_path, optimize=OPTIM_LEVELS[typecode])

            # Derive archive entry name from the fully-qualified module name. Packages have an implied
            # `__init__` component that needs to be appended explicitly.
            dest_name = name.replace('.', os.sep)
            if os.path.splitext(os.path.basename(src_path))[0] == '__init__':
                dest_name += os.sep + '__init__'
            dest_name += '.pyc'  # Always .pyc, regardless of optimization level.

            # Anonymize co_filename in the code object: use entry name, with the .pyc suffix shortened to .py.
            code = replace_filename_in_code_object(code, dest_name[:-1])

            # Serialize the .pyc image and store it; a plain ZipInfo carries a fixed timestamp, which keeps the
            # build deterministic.
            pyc_stream = io.BytesIO()
            pyc_stream.write(compat.BYTECODE_MAGIC)
            pyc_stream.write(struct.pack('<I', 0b01))  # PEP-552: hash-based pyc, check_source=False
            pyc_stream.write(b'\00' * 8)  # Zeroed source hash; matches `building.utils.compile_pymodule`.
            marshal.dump(code, pyc_stream)
            archive.writestr(zipfile.ZipInfo(dest_name), pyc_stream.getvalue())
|
||||