first commit

Commit 417e54da96: 5696 changed files with 900003 additions and 0 deletions
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,118 @@
from __future__ import annotations

from abc import ABC
from pathlib import Path

from virtualenv.seed.seeder import Seeder
from virtualenv.seed.wheels import Version

PERIODIC_UPDATE_ON_BY_DEFAULT = True


class BaseEmbed(Seeder, ABC):
    def __init__(self, options) -> None:
        super().__init__(options, enabled=options.no_seed is False)

        self.download = options.download
        self.extra_search_dir = [i.resolve() for i in options.extra_search_dir if i.exists()]

        self.pip_version = options.pip
        self.setuptools_version = options.setuptools
        self.wheel_version = options.wheel

        self.no_pip = options.no_pip
        self.no_setuptools = options.no_setuptools
        self.no_wheel = options.no_wheel
        self.app_data = options.app_data
        self.periodic_update = not options.no_periodic_update

        if not self.distribution_to_versions():
            self.enabled = False

    @classmethod
    def distributions(cls) -> dict[str, Version]:
        return {
            "pip": Version.bundle,
            "setuptools": Version.bundle,
            "wheel": Version.bundle,
        }

    def distribution_to_versions(self) -> dict[str, str]:
        return {
            distribution: getattr(self, f"{distribution}_version")
            for distribution in self.distributions()
            if getattr(self, f"no_{distribution}") is False and getattr(self, f"{distribution}_version") != "none"
        }

    @classmethod
    def add_parser_arguments(cls, parser, interpreter, app_data):  # noqa: ARG003
        group = parser.add_mutually_exclusive_group()
        group.add_argument(
            "--no-download",
            "--never-download",
            dest="download",
            action="store_false",
            help=f"pass to disable download of the latest {'/'.join(cls.distributions())} from PyPI",
            default=True,
        )
        group.add_argument(
            "--download",
            dest="download",
            action="store_true",
            help=f"pass to enable download of the latest {'/'.join(cls.distributions())} from PyPI",
            default=False,
        )
        parser.add_argument(
            "--extra-search-dir",
            metavar="d",
            type=Path,
            nargs="+",
            help="a path containing wheels to extend the internal wheel list (can be set 1+ times)",
            default=[],
        )
        for distribution, default in cls.distributions().items():
            if interpreter.version_info[:2] >= (3, 12) and distribution in {"wheel", "setuptools"}:
                default = "none"  # noqa: PLW2901
            parser.add_argument(
                f"--{distribution}",
                dest=distribution,
                metavar="version",
                help=f"version of {distribution} to install as seed: embed, bundle, none or exact version",
                default=default,
            )
        for distribution in cls.distributions():
            parser.add_argument(
                f"--no-{distribution}",
                dest=f"no_{distribution}",
                action="store_true",
                help=f"do not install {distribution}",
                default=False,
            )
        parser.add_argument(
            "--no-periodic-update",
            dest="no_periodic_update",
            action="store_true",
            help="disable the periodic (once every 14 days) update of the embedded wheels",
            default=not PERIODIC_UPDATE_ON_BY_DEFAULT,
        )

    def __repr__(self) -> str:
        result = self.__class__.__name__
        result += "("
        if self.extra_search_dir:
            result += f"extra_search_dir={', '.join(str(i) for i in self.extra_search_dir)},"
        result += f"download={self.download},"
        for distribution in self.distributions():
            if getattr(self, f"no_{distribution}"):
                continue
            version = getattr(self, f"{distribution}_version", None)
            if version == "none":
                continue
            ver = f"={version or 'latest'}"
            result += f" {distribution}{ver},"
        return result[:-1] + ")"


__all__ = [
    "BaseEmbed",
]
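The seeding decision above reduces to one dict comprehension; the standalone sketch below (with hypothetical option values, not taken from the commit) shows how distribution_to_versions() filters what gets seeded.

from types import SimpleNamespace

# hypothetical parsed CLI options, mirroring the attributes BaseEmbed reads
options = SimpleNamespace(
    pip="bundle", setuptools="none", wheel="23.1",
    no_pip=False, no_setuptools=False, no_wheel=True,
)

versions = {
    dist: getattr(options, dist)
    for dist in ("pip", "setuptools", "wheel")
    if getattr(options, f"no_{dist}") is False and getattr(options, dist) != "none"
}
print(versions)  # {'pip': 'bundle'}: setuptools asked for "none", wheel is switched off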
@@ -0,0 +1,63 @@
from __future__ import annotations

import logging
from contextlib import contextmanager
from subprocess import Popen

from virtualenv.discovery.cached_py_info import LogCmd
from virtualenv.seed.embed.base_embed import BaseEmbed
from virtualenv.seed.wheels import Version, get_wheel, pip_wheel_env_run


class PipInvoke(BaseEmbed):
    def __init__(self, options) -> None:
        super().__init__(options)

    def run(self, creator):
        if not self.enabled:
            return
        for_py_version = creator.interpreter.version_release_str
        with self.get_pip_install_cmd(creator.exe, for_py_version) as cmd:
            env = pip_wheel_env_run(self.extra_search_dir, self.app_data, self.env)
            self._execute(cmd, env)

    @staticmethod
    def _execute(cmd, env):
        logging.debug("pip seed by running: %s", LogCmd(cmd, env))
        process = Popen(cmd, env=env)
        process.communicate()
        if process.returncode != 0:
            msg = f"failed seed with code {process.returncode}"
            raise RuntimeError(msg)
        return process

    @contextmanager
    def get_pip_install_cmd(self, exe, for_py_version):
        cmd = [str(exe), "-m", "pip", "-q", "install", "--only-binary", ":all:", "--disable-pip-version-check"]
        if not self.download:
            cmd.append("--no-index")
        folders = set()
        for dist, version in self.distribution_to_versions().items():
            wheel = get_wheel(
                distribution=dist,
                version=version,
                for_py_version=for_py_version,
                search_dirs=self.extra_search_dir,
                download=False,
                app_data=self.app_data,
                do_periodic_update=self.periodic_update,
                env=self.env,
            )
            if wheel is None:
                msg = f"could not get wheel for distribution {dist}"
                raise RuntimeError(msg)
            folders.add(str(wheel.path.parent))
            cmd.append(Version.as_pip_req(dist, wheel.version))
        for folder in sorted(folders):
            cmd.extend(["--find-links", str(folder)])
        yield cmd


__all__ = [
    "PipInvoke",
]
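For orientation, a hedged sketch of the command that get_pip_install_cmd ends up yielding; the interpreter path, wheel versions, and wheelhouse folder below are made-up placeholders.

# illustrative only: the list mirrors the order the code above appends arguments in
cmd = [
    "/tmp/venv/bin/python", "-m", "pip", "-q", "install",
    "--only-binary", ":all:", "--disable-pip-version-check",
    "--no-index",                         # only when download is disabled
    "pip==24.2",                          # Version.as_pip_req(dist, wheel.version)
    "setuptools==74.1.2",
    "--find-links", "/home/user/.local/share/virtualenv/wheel/house",
]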
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,204 @@
from __future__ import annotations

import logging
import os
import re
import zipfile
from abc import ABC, abstractmethod
from configparser import ConfigParser
from itertools import chain
from pathlib import Path
from tempfile import mkdtemp

from distlib.scripts import ScriptMaker, enquote_executable

from virtualenv.util.path import safe_delete


class PipInstall(ABC):
    def __init__(self, wheel, creator, image_folder) -> None:
        self._wheel = wheel
        self._creator = creator
        self._image_dir = image_folder
        self._extracted = False
        self.__dist_info = None
        self._console_entry_points = None

    @abstractmethod
    def _sync(self, src, dst):
        raise NotImplementedError

    def install(self, version_info):
        self._extracted = True
        self._uninstall_previous_version()
        # sync image
        for filename in self._image_dir.iterdir():
            into = self._creator.purelib / filename.name
            self._sync(filename, into)
        # generate console executables
        consoles = set()
        script_dir = self._creator.script_dir
        for name, module in self._console_scripts.items():
            consoles.update(self._create_console_entry_point(name, module, script_dir, version_info))
        logging.debug("generated console scripts %s", " ".join(i.name for i in consoles))

    def build_image(self):
        # 1. first extract the wheel
        logging.debug("build install image for %s to %s", self._wheel.name, self._image_dir)
        with zipfile.ZipFile(str(self._wheel)) as zip_ref:
            self._shorten_path_if_needed(zip_ref)
            zip_ref.extractall(str(self._image_dir))
            self._extracted = True
        # 2. now add additional files not present in the distribution
        new_files = self._generate_new_files()
        # 3. finally fix the records file
        self._fix_records(new_files)

    def _shorten_path_if_needed(self, zip_ref):
        if os.name == "nt":
            to_folder = str(self._image_dir)
            # https://docs.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation
            zip_max_len = max(len(i) for i in zip_ref.namelist())
            path_len = zip_max_len + len(to_folder)
            if path_len > 260:  # noqa: PLR2004
                self._image_dir.mkdir(exist_ok=True)  # to get a short path must exist

                from virtualenv.util.path import get_short_path_name  # noqa: PLC0415

                to_folder = get_short_path_name(to_folder)
                self._image_dir = Path(to_folder)

    def _records_text(self, files):
        return "\n".join(f"{os.path.relpath(str(rec), str(self._image_dir))},," for rec in files)

    def _generate_new_files(self):
        new_files = set()
        installer = self._dist_info / "INSTALLER"
        installer.write_text("pip\n", encoding="utf-8")
        new_files.add(installer)
        # inject a no-op root element, as workaround for bug in https://github.com/pypa/pip/issues/7226
        marker = self._image_dir / f"{self._dist_info.stem}.virtualenv"
        marker.write_text("", encoding="utf-8")
        new_files.add(marker)
        folder = mkdtemp()
        try:
            to_folder = Path(folder)
            rel = os.path.relpath(str(self._creator.script_dir), str(self._creator.purelib))
            version_info = self._creator.interpreter.version_info
            for name, module in self._console_scripts.items():
                new_files.update(
                    Path(os.path.normpath(str(self._image_dir / rel / i.name)))
                    for i in self._create_console_entry_point(name, module, to_folder, version_info)
                )
        finally:
            safe_delete(folder)
        return new_files

    @property
    def _dist_info(self):
        if self._extracted is False:
            return None  # pragma: no cover
        if self.__dist_info is None:
            files = []
            for filename in self._image_dir.iterdir():
                files.append(filename.name)
                if filename.suffix == ".dist-info":
                    self.__dist_info = filename
                    break
            else:
                msg = f"no .dist-info at {self._image_dir}, has {', '.join(files)}"
                raise RuntimeError(msg)  # pragma: no cover
        return self.__dist_info

    @abstractmethod
    def _fix_records(self, extra_record_data):
        raise NotImplementedError

    @property
    def _console_scripts(self):
        if self._extracted is False:
            return None  # pragma: no cover
        if self._console_entry_points is None:
            self._console_entry_points = {}
            entry_points = self._dist_info / "entry_points.txt"
            if entry_points.exists():
                parser = ConfigParser()
                with entry_points.open(encoding="utf-8") as file_handler:
                    parser.read_file(file_handler)
                if "console_scripts" in parser.sections():
                    for name, value in parser.items("console_scripts"):
                        match = re.match(r"(.*?)-?\d\.?\d*", name)
                        our_name = match.groups(1)[0] if match else name
                        self._console_entry_points[our_name] = value
        return self._console_entry_points

    def _create_console_entry_point(self, name, value, to_folder, version_info):
        result = []
        maker = ScriptMakerCustom(to_folder, version_info, self._creator.exe, name)
        specification = f"{name} = {value}"
        new_files = maker.make(specification)
        result.extend(Path(i) for i in new_files)
        return result

    def _uninstall_previous_version(self):
        dist_name = self._dist_info.stem.split("-")[0]
        in_folders = chain.from_iterable([i.iterdir() for i in (self._creator.purelib, self._creator.platlib)])
        paths = (p for p in in_folders if p.stem.split("-")[0] == dist_name and p.suffix == ".dist-info" and p.is_dir())
        existing_dist = next(paths, None)
        if existing_dist is not None:
            self._uninstall_dist(existing_dist)

    @staticmethod
    def _uninstall_dist(dist):
        dist_base = dist.parent
        logging.debug("uninstall existing distribution %s from %s", dist.stem, dist_base)

        top_txt = dist / "top_level.txt"  # add top level packages at folder level
        paths = (
            {dist.parent / i.strip() for i in top_txt.read_text(encoding="utf-8").splitlines()}
            if top_txt.exists()
            else set()
        )
        paths.add(dist)  # add the dist-info folder itself

        base_dirs, record = paths.copy(), dist / "RECORD"  # collect entries in record that we did not register yet
        for name in (
            (i.split(",")[0] for i in record.read_text(encoding="utf-8").splitlines()) if record.exists() else ()
        ):
            path = dist_base / name
            if not any(p in base_dirs for p in path.parents):  # only add if not already added as a base dir
                paths.add(path)

        for path in sorted(paths):  # actually remove stuff in a stable order
            if path.exists():
                if path.is_dir() and not path.is_symlink():
                    safe_delete(path)
                else:
                    path.unlink()

    def clear(self):
        if self._image_dir.exists():
            safe_delete(self._image_dir)

    def has_image(self):
        return self._image_dir.exists() and next(self._image_dir.iterdir()) is not None


class ScriptMakerCustom(ScriptMaker):
    def __init__(self, target_dir, version_info, executable, name) -> None:
        super().__init__(None, str(target_dir))
        self.clobber = True  # overwrite
        self.set_mode = True  # ensure they are executable
        self.executable = enquote_executable(str(executable))
        self.version_info = version_info.major, version_info.minor
        self.variants = {"", "X", "X.Y"}
        self._name = name

    def _write_script(self, names, shebang, script_bytes, filenames, ext):
        names.add(f"{self._name}{self.version_info[0]}.{self.version_info[1]}")
        super()._write_script(names, shebang, script_bytes, filenames, ext)


__all__ = [
    "PipInstall",
]
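The entry-point name normalisation in _console_scripts is easy to check in isolation; a small sketch with sample names (not part of the commit):

import re

# trailing version suffixes such as "pip3.12" collapse to the bare script name
for name in ("pip", "pip3", "pip3.12", "wheel"):
    match = re.match(r"(.*?)-?\d\.?\d*", name)
    print(name, "->", match.groups(1)[0] if match else name)
# pip -> pip, pip3 -> pip, pip3.12 -> pip, wheel -> wheel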
@@ -0,0 +1,40 @@
from __future__ import annotations  # noqa: A005

import os
from pathlib import Path

from virtualenv.util.path import copy

from .base import PipInstall


class CopyPipInstall(PipInstall):
    def _sync(self, src, dst):
        copy(src, dst)

    def _generate_new_files(self):
        # create the pyc files
        new_files = super()._generate_new_files()
        new_files.update(self._cache_files())
        return new_files

    def _cache_files(self):
        version = self._creator.interpreter.version_info
        py_c_ext = f".{self._creator.interpreter.implementation.lower()}-{version.major}{version.minor}.pyc"
        for root, dirs, files in os.walk(str(self._image_dir), topdown=True):
            root_path = Path(root)
            for name in files:
                if name.endswith(".py"):
                    yield root_path / f"{name[:-3]}{py_c_ext}"
            for name in dirs:
                yield root_path / name / "__pycache__"

    def _fix_records(self, new_files):
        extra_record_data_str = self._records_text(new_files)
        with (self._dist_info / "RECORD").open("ab") as file_handler:
            file_handler.write(extra_record_data_str.encode("utf-8"))


__all__ = [
    "CopyPipInstall",
]
@@ -0,0 +1,58 @@
from __future__ import annotations

import os
from stat import S_IREAD, S_IRGRP, S_IROTH
from subprocess import PIPE, Popen

from virtualenv.util.path import safe_delete, set_tree

from .base import PipInstall


class SymlinkPipInstall(PipInstall):
    def _sync(self, src, dst):
        os.symlink(str(src), str(dst))

    def _generate_new_files(self):
        # create the pyc files, as the build image will be R/O
        cmd = [str(self._creator.exe), "-m", "compileall", str(self._image_dir)]
        process = Popen(cmd, stdout=PIPE, stderr=PIPE)
        process.communicate()
        # the root pyc is shared, so we'll not symlink that - but still add the pyc files to the RECORD for close
        root_py_cache = self._image_dir / "__pycache__"
        new_files = set()
        if root_py_cache.exists():
            new_files.update(root_py_cache.iterdir())
            new_files.add(root_py_cache)
            safe_delete(root_py_cache)
        core_new_files = super()._generate_new_files()
        # remove files that are within the image folder deeper than one level (as these will be not linked directly)
        for file in core_new_files:
            try:
                rel = file.relative_to(self._image_dir)
                if len(rel.parts) > 1:
                    continue
            except ValueError:
                pass
            new_files.add(file)
        return new_files

    def _fix_records(self, new_files):
        new_files.update(i for i in self._image_dir.iterdir())
        extra_record_data_str = self._records_text(sorted(new_files, key=str))
        (self._dist_info / "RECORD").write_text(extra_record_data_str, encoding="utf-8")

    def build_image(self):
        super().build_image()
        # protect the image by making it read only
        set_tree(self._image_dir, S_IREAD | S_IRGRP | S_IROTH)

    def clear(self):
        if self._image_dir.exists():
            safe_delete(self._image_dir)
        super().clear()


__all__ = [
    "SymlinkPipInstall",
]
@@ -0,0 +1,144 @@
"""Bootstrap."""

from __future__ import annotations

import logging
import sys
import traceback
from contextlib import contextmanager
from pathlib import Path
from subprocess import CalledProcessError
from threading import Lock, Thread

from virtualenv.info import fs_supports_symlink
from virtualenv.seed.embed.base_embed import BaseEmbed
from virtualenv.seed.wheels import get_wheel

from .pip_install.copy import CopyPipInstall
from .pip_install.symlink import SymlinkPipInstall


class FromAppData(BaseEmbed):
    def __init__(self, options) -> None:
        super().__init__(options)
        self.symlinks = options.symlink_app_data

    @classmethod
    def add_parser_arguments(cls, parser, interpreter, app_data):
        super().add_parser_arguments(parser, interpreter, app_data)
        can_symlink = app_data.transient is False and fs_supports_symlink()
        sym = "" if can_symlink else "not supported - "
        parser.add_argument(
            "--symlink-app-data",
            dest="symlink_app_data",
            action="store_true" if can_symlink else "store_false",
            help=f"{sym} symlink the python packages from the app-data folder (requires seed pip>=19.3)",
            default=False,
        )

    def run(self, creator):
        if not self.enabled:
            return
        with self._get_seed_wheels(creator) as name_to_whl:
            pip_version = name_to_whl["pip"].version_tuple if "pip" in name_to_whl else None
            installer_class = self.installer_class(pip_version)
            exceptions = {}

            def _install(name, wheel):
                try:
                    logging.debug("install %s from wheel %s via %s", name, wheel, installer_class.__name__)
                    key = Path(installer_class.__name__) / wheel.path.stem
                    wheel_img = self.app_data.wheel_image(creator.interpreter.version_release_str, key)
                    installer = installer_class(wheel.path, creator, wheel_img)
                    parent = self.app_data.lock / wheel_img.parent
                    with parent.non_reentrant_lock_for_key(wheel_img.name):
                        if not installer.has_image():
                            installer.build_image()
                    installer.install(creator.interpreter.version_info)
                except Exception:  # noqa: BLE001
                    exceptions[name] = sys.exc_info()

            threads = [Thread(target=_install, args=(n, w)) for n, w in name_to_whl.items()]
            for thread in threads:
                thread.start()
            for thread in threads:
                thread.join()
            if exceptions:
                messages = [f"failed to build image {', '.join(exceptions.keys())} because:"]
                for value in exceptions.values():
                    exc_type, exc_value, exc_traceback = value
                    messages.append("".join(traceback.format_exception(exc_type, exc_value, exc_traceback)))
                raise RuntimeError("\n".join(messages))

    @contextmanager
    def _get_seed_wheels(self, creator):  # noqa: C901
        name_to_whl, lock, fail = {}, Lock(), {}

        def _get(distribution, version):
            for_py_version = creator.interpreter.version_release_str
            failure, result = None, None
            # fallback to download in case the exact version is not available
            for download in [True] if self.download else [False, True]:
                failure = None
                try:
                    result = get_wheel(
                        distribution=distribution,
                        version=version,
                        for_py_version=for_py_version,
                        search_dirs=self.extra_search_dir,
                        download=download,
                        app_data=self.app_data,
                        do_periodic_update=self.periodic_update,
                        env=self.env,
                    )
                    if result is not None:
                        break
                except Exception as exception:
                    logging.exception("fail")
                    failure = exception
            if failure:
                if isinstance(failure, CalledProcessError):
                    msg = f"failed to download {distribution}"
                    if version is not None:
                        msg += f" version {version}"
                    msg += f", pip download exit code {failure.returncode}"
                    output = failure.output + failure.stderr
                    if output:
                        msg += "\n"
                        msg += output
                else:
                    msg = repr(failure)
                logging.error(msg)
                with lock:
                    fail[distribution] = version
            else:
                with lock:
                    name_to_whl[distribution] = result

        threads = [
            Thread(target=_get, args=(distribution, version))
            for distribution, version in self.distribution_to_versions().items()
        ]
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()
        if fail:
            msg = f"seed failed due to failing to download wheels {', '.join(fail.keys())}"
            raise RuntimeError(msg)
        yield name_to_whl

    def installer_class(self, pip_version_tuple):
        if self.symlinks and pip_version_tuple and pip_version_tuple >= (19, 3):  # symlink support requires pip 19.3+
            return SymlinkPipInstall
        return CopyPipInstall

    def __repr__(self) -> str:
        msg = f", via={'symlink' if self.symlinks else 'copy'}, app_data_dir={self.app_data}"
        base = super().__repr__()
        return f"{base[:-1]}{msg}{base[-1]}"


__all__ = [
    "FromAppData",
]
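The symlink-versus-copy choice in installer_class is a single predicate; restated below as a standalone helper for clarity (the version tuples are examples, not from the commit).

def use_symlinks(symlink_requested, pip_version_tuple):
    # pip gained the required support in 19.3, hence the lower bound
    return bool(symlink_requested and pip_version_tuple and pip_version_tuple >= (19, 3))

print(use_symlinks(True, (24, 2)))   # True  -> SymlinkPipInstall
print(use_symlinks(True, (19, 2)))   # False -> CopyPipInstall
print(use_symlinks(False, (24, 2)))  # False -> CopyPipInstall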
@@ -0,0 +1,43 @@
from __future__ import annotations

from abc import ABC, abstractmethod


class Seeder(ABC):
    """A seeder will install some seed packages into a virtual environment."""

    def __init__(self, options, enabled) -> None:
        """
        Create.

        :param options: the parsed options as defined within :meth:`add_parser_arguments`
        :param enabled: a flag indicating whether the seeder is enabled or not
        """
        self.enabled = enabled
        self.env = options.env

    @classmethod
    def add_parser_arguments(cls, parser, interpreter, app_data):
        """
        Add CLI arguments for this seed mechanism.

        :param parser: the CLI parser
        :param app_data: the application data folder
        :param interpreter: the interpreter this virtual environment is based on
        """
        raise NotImplementedError

    @abstractmethod
    def run(self, creator):
        """
        Perform the seed operation.

        :param creator: the creator (based on :class:`virtualenv.create.creator.Creator`) we used to create this \
        virtual environment
        """
        raise NotImplementedError


__all__ = [
    "Seeder",
]
@@ -0,0 +1,11 @@
from __future__ import annotations

from .acquire import get_wheel, pip_wheel_env_run
from .util import Version, Wheel

__all__ = [
    "Version",
    "Wheel",
    "get_wheel",
    "pip_wheel_env_run",
]
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,132 @@
"""Bootstrap."""

from __future__ import annotations

import logging
import sys
from operator import eq, lt
from pathlib import Path
from subprocess import PIPE, CalledProcessError, Popen

from .bundle import from_bundle
from .periodic_update import add_wheel_to_update_log
from .util import Version, Wheel, discover_wheels


def get_wheel(  # noqa: PLR0913
    distribution,
    version,
    for_py_version,
    search_dirs,
    download,
    app_data,
    do_periodic_update,
    env,
):
    """Get a wheel with the given distribution-version-for_py_version trio, by using the extra search dir + download."""
    # not all wheels are compatible with all python versions, so we need to py version qualify it
    wheel = None

    if not download or version != Version.bundle:
        # 1. acquire from bundle
        wheel = from_bundle(distribution, version, for_py_version, search_dirs, app_data, do_periodic_update, env)

    if download and wheel is None and version != Version.embed:
        # 2. download from the internet
        wheel = download_wheel(
            distribution=distribution,
            version_spec=Version.as_version_spec(version),
            for_py_version=for_py_version,
            search_dirs=search_dirs,
            app_data=app_data,
            to_folder=app_data.house,
            env=env,
        )
        if wheel is not None and app_data.can_update:
            add_wheel_to_update_log(wheel, for_py_version, app_data)

    return wheel


def download_wheel(distribution, version_spec, for_py_version, search_dirs, app_data, to_folder, env):  # noqa: PLR0913
    to_download = f"{distribution}{version_spec or ''}"
    logging.debug("download wheel %s %s to %s", to_download, for_py_version, to_folder)
    cmd = [
        sys.executable,
        "-m",
        "pip",
        "download",
        "--progress-bar",
        "off",
        "--disable-pip-version-check",
        "--only-binary=:all:",
        "--no-deps",
        "--python-version",
        for_py_version,
        "-d",
        str(to_folder),
        to_download,
    ]
    # pip has no interface in python - must be a new sub-process
    env = pip_wheel_env_run(search_dirs, app_data, env)
    process = Popen(cmd, env=env, stdout=PIPE, stderr=PIPE, universal_newlines=True, encoding="utf-8")
    out, err = process.communicate()
    if process.returncode != 0:
        kwargs = {"output": out, "stderr": err}
        raise CalledProcessError(process.returncode, cmd, **kwargs)
    result = _find_downloaded_wheel(distribution, version_spec, for_py_version, to_folder, out)
    logging.debug("downloaded wheel %s", result.name)
    return result


def _find_downloaded_wheel(distribution, version_spec, for_py_version, to_folder, out):
    for line in out.splitlines():
        stripped_line = line.lstrip()
        for marker in ("Saved ", "File was already downloaded "):
            if stripped_line.startswith(marker):
                return Wheel(Path(stripped_line[len(marker) :]).absolute())
    # if for some reason the output does not match fallback to the latest version with that spec
    return find_compatible_in_house(distribution, version_spec, for_py_version, to_folder)


def find_compatible_in_house(distribution, version_spec, for_py_version, in_folder):
    wheels = discover_wheels(in_folder, distribution, None, for_py_version)
    start, end = 0, len(wheels)
    if version_spec is not None and version_spec:
        if version_spec.startswith("<"):
            from_pos, op = 1, lt
        elif version_spec.startswith("=="):
            from_pos, op = 2, eq
        else:
            raise ValueError(version_spec)
        version = Wheel.as_version_tuple(version_spec[from_pos:])
        start = next((at for at, w in enumerate(wheels) if op(w.version_tuple, version)), len(wheels))

    return None if start == end else wheels[start]


def pip_wheel_env_run(search_dirs, app_data, env):
    env = env.copy()
    env.update({"PIP_USE_WHEEL": "1", "PIP_USER": "0", "PIP_NO_INPUT": "1"})
    wheel = get_wheel(
        distribution="pip",
        version=None,
        for_py_version=f"{sys.version_info.major}.{sys.version_info.minor}",
        search_dirs=search_dirs,
        download=False,
        app_data=app_data,
        do_periodic_update=False,
        env=env,
    )
    if wheel is None:
        msg = "could not find the embedded pip"
        raise RuntimeError(msg)
    env["PYTHONPATH"] = str(wheel.path)
    return env


__all__ = [
    "download_wheel",
    "get_wheel",
    "pip_wheel_env_run",
]
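As a rough illustration, the subprocess assembled by download_wheel for a hypothetical request (pip 24.x wheels for CPython 3.12, saved into a made-up wheelhouse path) would look like:

# illustrative only: placeholders for interpreter, target folder, and requirement
cmd = [
    "/usr/bin/python3", "-m", "pip", "download",
    "--progress-bar", "off",
    "--disable-pip-version-check",
    "--only-binary=:all:", "--no-deps",
    "--python-version", "3.12",
    "-d", "/home/user/.local/share/virtualenv/wheel/house",
    "pip==24.2",
]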
@@ -0,0 +1,50 @@
from __future__ import annotations

from virtualenv.seed.wheels.embed import get_embed_wheel

from .periodic_update import periodic_update
from .util import Version, Wheel, discover_wheels


def from_bundle(distribution, version, for_py_version, search_dirs, app_data, do_periodic_update, env):  # noqa: PLR0913
    """Load the bundled wheel to a cache directory."""
    of_version = Version.of_version(version)
    wheel = load_embed_wheel(app_data, distribution, for_py_version, of_version)

    if version != Version.embed:
        # 2. check if we have upgraded embed
        if app_data.can_update:
            per = do_periodic_update
            wheel = periodic_update(distribution, of_version, for_py_version, wheel, search_dirs, app_data, per, env)

        # 3. acquire from extra search dir
        found_wheel = from_dir(distribution, of_version, for_py_version, search_dirs)
        if found_wheel is not None and (wheel is None or found_wheel.version_tuple > wheel.version_tuple):
            wheel = found_wheel
    return wheel


def load_embed_wheel(app_data, distribution, for_py_version, version):
    wheel = get_embed_wheel(distribution, for_py_version)
    if wheel is not None:
        version_match = version == wheel.version
        if version is None or version_match:
            with app_data.ensure_extracted(wheel.path, lambda: app_data.house) as wheel_path:
                wheel = Wheel(wheel_path)
        else:  # if version does not match ignore
            wheel = None
    return wheel


def from_dir(distribution, version, for_py_version, directories):
    """Load a compatible wheel from a given folder."""
    for folder in directories:
        for wheel in discover_wheels(folder, distribution, version, for_py_version):
            return wheel
    return None


__all__ = [
    "from_bundle",
    "load_embed_wheel",
]
@@ -0,0 +1,63 @@
from __future__ import annotations

from pathlib import Path

from virtualenv.seed.wheels.util import Wheel

BUNDLE_FOLDER = Path(__file__).absolute().parent
BUNDLE_SUPPORT = {
    "3.7": {
        "pip": "pip-24.0-py3-none-any.whl",
        "setuptools": "setuptools-68.0.0-py3-none-any.whl",
        "wheel": "wheel-0.42.0-py3-none-any.whl",
    },
    "3.8": {
        "pip": "pip-24.2-py3-none-any.whl",
        "setuptools": "setuptools-74.1.2-py3-none-any.whl",
        "wheel": "wheel-0.44.0-py3-none-any.whl",
    },
    "3.9": {
        "pip": "pip-24.2-py3-none-any.whl",
        "setuptools": "setuptools-74.1.2-py3-none-any.whl",
        "wheel": "wheel-0.44.0-py3-none-any.whl",
    },
    "3.10": {
        "pip": "pip-24.2-py3-none-any.whl",
        "setuptools": "setuptools-74.1.2-py3-none-any.whl",
        "wheel": "wheel-0.44.0-py3-none-any.whl",
    },
    "3.11": {
        "pip": "pip-24.2-py3-none-any.whl",
        "setuptools": "setuptools-74.1.2-py3-none-any.whl",
        "wheel": "wheel-0.44.0-py3-none-any.whl",
    },
    "3.12": {
        "pip": "pip-24.2-py3-none-any.whl",
        "setuptools": "setuptools-74.1.2-py3-none-any.whl",
        "wheel": "wheel-0.44.0-py3-none-any.whl",
    },
    "3.13": {
        "pip": "pip-24.2-py3-none-any.whl",
        "setuptools": "setuptools-74.1.2-py3-none-any.whl",
        "wheel": "wheel-0.44.0-py3-none-any.whl",
    },
    "3.14": {
        "pip": "pip-24.2-py3-none-any.whl",
        "setuptools": "setuptools-74.1.2-py3-none-any.whl",
        "wheel": "wheel-0.44.0-py3-none-any.whl",
    },
}
MAX = "3.7"


def get_embed_wheel(distribution, for_py_version):
    path = BUNDLE_FOLDER / (BUNDLE_SUPPORT.get(for_py_version, {}) or BUNDLE_SUPPORT[MAX]).get(distribution)
    return Wheel.from_path(path)


__all__ = [
    "BUNDLE_FOLDER",
    "BUNDLE_SUPPORT",
    "MAX",
    "get_embed_wheel",
]
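The lookup has one subtlety worth calling out: interpreter versions missing from BUNDLE_SUPPORT fall back to the MAX table. A trimmed, self-contained sketch of that rule:

BUNDLE_SUPPORT = {  # trimmed copy for illustration only
    "3.7": {"pip": "pip-24.0-py3-none-any.whl"},
    "3.12": {"pip": "pip-24.2-py3-none-any.whl"},
}
MAX = "3.7"

def filename_for(distribution, for_py_version):
    # same expression as get_embed_wheel, minus the BUNDLE_FOLDER prefix
    return (BUNDLE_SUPPORT.get(for_py_version, {}) or BUNDLE_SUPPORT[MAX]).get(distribution)

print(filename_for("pip", "3.12"))  # pip-24.2-py3-none-any.whl (exact match)
print(filename_for("pip", "2.7"))   # pip-24.0-py3-none-any.whl (falls back to MAX)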
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,427 @@
"""Periodically update bundled versions."""

from __future__ import annotations

import json
import logging
import os
import ssl
import sys
from datetime import datetime, timedelta, timezone
from itertools import groupby
from pathlib import Path
from shutil import copy2
from subprocess import DEVNULL, Popen
from textwrap import dedent
from threading import Thread
from urllib.error import URLError
from urllib.request import urlopen

from virtualenv.app_data import AppDataDiskFolder
from virtualenv.seed.wheels.embed import BUNDLE_SUPPORT
from virtualenv.seed.wheels.util import Wheel
from virtualenv.util.subprocess import CREATE_NO_WINDOW

GRACE_PERIOD_CI = timedelta(hours=1)  # prevent version switch in the middle of a CI run
GRACE_PERIOD_MINOR = timedelta(days=28)
UPDATE_PERIOD = timedelta(days=14)
UPDATE_ABORTED_DELAY = timedelta(hours=1)


def periodic_update(  # noqa: PLR0913
    distribution,
    of_version,
    for_py_version,
    wheel,
    search_dirs,
    app_data,
    do_periodic_update,
    env,
):
    if do_periodic_update:
        handle_auto_update(distribution, for_py_version, wheel, search_dirs, app_data, env)

    now = datetime.now(tz=timezone.utc)

    def _update_wheel(ver):
        updated_wheel = Wheel(app_data.house / ver.filename)
        logging.debug("using %supdated wheel %s", "periodically " if updated_wheel else "", updated_wheel)
        return updated_wheel

    u_log = UpdateLog.from_app_data(app_data, distribution, for_py_version)
    if of_version is None:
        for _, group in groupby(u_log.versions, key=lambda v: v.wheel.version_tuple[0:2]):
            # use only latest patch version per minor, earlier assumed to be buggy
            all_patches = list(group)
            ignore_grace_period_minor = any(version for version in all_patches if version.use(now))
            for version in all_patches:
                if wheel is not None and Path(version.filename).name == wheel.name:
                    return wheel
                if version.use(now, ignore_grace_period_minor):
                    return _update_wheel(version)
    else:
        for version in u_log.versions:
            if version.wheel.version == of_version:
                return _update_wheel(version)

    return wheel


def handle_auto_update(distribution, for_py_version, wheel, search_dirs, app_data, env):  # noqa: PLR0913
    embed_update_log = app_data.embed_update_log(distribution, for_py_version)
    u_log = UpdateLog.from_dict(embed_update_log.read())
    if u_log.needs_update:
        u_log.periodic = True
        u_log.started = datetime.now(tz=timezone.utc)
        embed_update_log.write(u_log.to_dict())
        trigger_update(distribution, for_py_version, wheel, search_dirs, app_data, periodic=True, env=env)


def add_wheel_to_update_log(wheel, for_py_version, app_data):
    embed_update_log = app_data.embed_update_log(wheel.distribution, for_py_version)
    logging.debug("adding %s information to %s", wheel.name, embed_update_log.file)
    u_log = UpdateLog.from_dict(embed_update_log.read())
    if any(version.filename == wheel.name for version in u_log.versions):
        logging.warning("%s already present in %s", wheel.name, embed_update_log.file)
        return
    # we don't need a release date for sources other than "periodic"
    version = NewVersion(wheel.name, datetime.now(tz=timezone.utc), None, "download")
    u_log.versions.append(version)  # always write at the end for proper updates
    embed_update_log.write(u_log.to_dict())


DATETIME_FMT = "%Y-%m-%dT%H:%M:%S.%fZ"


def dump_datetime(value):
    return None if value is None else value.strftime(DATETIME_FMT)


def load_datetime(value):
    return None if value is None else datetime.strptime(value, DATETIME_FMT).replace(tzinfo=timezone.utc)


class NewVersion:  # noqa: PLW1641
    def __init__(self, filename, found_date, release_date, source) -> None:
        self.filename = filename
        self.found_date = found_date
        self.release_date = release_date
        self.source = source

    @classmethod
    def from_dict(cls, dictionary):
        return cls(
            filename=dictionary["filename"],
            found_date=load_datetime(dictionary["found_date"]),
            release_date=load_datetime(dictionary["release_date"]),
            source=dictionary["source"],
        )

    def to_dict(self):
        return {
            "filename": self.filename,
            "release_date": dump_datetime(self.release_date),
            "found_date": dump_datetime(self.found_date),
            "source": self.source,
        }

    def use(self, now, ignore_grace_period_minor=False, ignore_grace_period_ci=False):  # noqa: FBT002
        if self.source == "manual":
            return True
        if self.source == "periodic" and (self.found_date < now - GRACE_PERIOD_CI or ignore_grace_period_ci):
            if not ignore_grace_period_minor:
                compare_from = self.release_date or self.found_date
                return now - compare_from >= GRACE_PERIOD_MINOR
            return True
        return False

    def __repr__(self) -> str:
        return (
            f"{self.__class__.__name__}(filename={self.filename}), found_date={self.found_date}, "
            f"release_date={self.release_date}, source={self.source})"
        )

    def __eq__(self, other):
        return type(self) == type(other) and all(  # noqa: E721
            getattr(self, k) == getattr(other, k) for k in ["filename", "release_date", "found_date", "source"]
        )

    def __ne__(self, other):
        return not (self == other)

    @property
    def wheel(self):
        return Wheel(Path(self.filename))


class UpdateLog:
    def __init__(self, started, completed, versions, periodic) -> None:
        self.started = started
        self.completed = completed
        self.versions = versions
        self.periodic = periodic

    @classmethod
    def from_dict(cls, dictionary):
        if dictionary is None:
            dictionary = {}
        return cls(
            load_datetime(dictionary.get("started")),
            load_datetime(dictionary.get("completed")),
            [NewVersion.from_dict(v) for v in dictionary.get("versions", [])],
            dictionary.get("periodic"),
        )

    @classmethod
    def from_app_data(cls, app_data, distribution, for_py_version):
        raw_json = app_data.embed_update_log(distribution, for_py_version).read()
        return cls.from_dict(raw_json)

    def to_dict(self):
        return {
            "started": dump_datetime(self.started),
            "completed": dump_datetime(self.completed),
            "periodic": self.periodic,
            "versions": [r.to_dict() for r in self.versions],
        }

    @property
    def needs_update(self):
        now = datetime.now(tz=timezone.utc)
        if self.completed is None:  # never completed
            return self._check_start(now)
        if now - self.completed <= UPDATE_PERIOD:
            return False
        return self._check_start(now)

    def _check_start(self, now):
        return self.started is None or now - self.started > UPDATE_ABORTED_DELAY


def trigger_update(distribution, for_py_version, wheel, search_dirs, app_data, env, periodic):  # noqa: PLR0913
    wheel_path = None if wheel is None else str(wheel.path)
    cmd = [
        sys.executable,
        "-c",
        dedent(
            """
            from virtualenv.report import setup_report, MAX_LEVEL
            from virtualenv.seed.wheels.periodic_update import do_update
            setup_report(MAX_LEVEL, show_pid=True)
            do_update({!r}, {!r}, {!r}, {!r}, {!r}, {!r})
            """,
        )
        .strip()
        .format(distribution, for_py_version, wheel_path, str(app_data), [str(p) for p in search_dirs], periodic),
    ]
    debug = env.get("_VIRTUALENV_PERIODIC_UPDATE_INLINE") == "1"
    pipe = None if debug else DEVNULL
    kwargs = {"stdout": pipe, "stderr": pipe}
    if not debug and sys.platform == "win32":
        kwargs["creationflags"] = CREATE_NO_WINDOW
    process = Popen(cmd, **kwargs)
    logging.info(
        "triggered periodic upgrade of %s%s (for python %s) via background process having PID %d",
        distribution,
        "" if wheel is None else f"=={wheel.version}",
        for_py_version,
        process.pid,
    )
    if debug:
        process.communicate()  # on purpose not called to make it a background process
    else:
        # set the returncode here -> no ResourceWarning on main process exit if the subprocess still runs
        process.returncode = 0


def do_update(distribution, for_py_version, embed_filename, app_data, search_dirs, periodic):  # noqa: PLR0913
    versions = None
    try:
        versions = _run_do_update(app_data, distribution, embed_filename, for_py_version, periodic, search_dirs)
    finally:
        logging.debug("done %s %s with %s", distribution, for_py_version, versions)
    return versions


def _run_do_update(  # noqa: C901, PLR0913
    app_data,
    distribution,
    embed_filename,
    for_py_version,
    periodic,
    search_dirs,
):
    from virtualenv.seed.wheels import acquire  # noqa: PLC0415

    wheel_filename = None if embed_filename is None else Path(embed_filename)
    embed_version = None if wheel_filename is None else Wheel(wheel_filename).version_tuple
    app_data = AppDataDiskFolder(app_data) if isinstance(app_data, str) else app_data
    search_dirs = [Path(p) if isinstance(p, str) else p for p in search_dirs]
    wheelhouse = app_data.house
    embed_update_log = app_data.embed_update_log(distribution, for_py_version)
    u_log = UpdateLog.from_dict(embed_update_log.read())
    now = datetime.now(tz=timezone.utc)

    update_versions, other_versions = [], []
    for version in u_log.versions:
        if version.source in {"periodic", "manual"}:
            update_versions.append(version)
        else:
            other_versions.append(version)

    if periodic:
        source = "periodic"
    else:
        source = "manual"
        # mark the most recent one as source "manual"
        if update_versions:
            update_versions[0].source = source

    if wheel_filename is not None:
        dest = wheelhouse / wheel_filename.name
        if not dest.exists():
            copy2(str(wheel_filename), str(wheelhouse))
    last, last_version, versions, filenames = None, None, [], set()
    while last is None or not last.use(now, ignore_grace_period_ci=True):
        download_time = datetime.now(tz=timezone.utc)
        dest = acquire.download_wheel(
            distribution=distribution,
            version_spec=None if last_version is None else f"<{last_version}",
            for_py_version=for_py_version,
            search_dirs=search_dirs,
            app_data=app_data,
            to_folder=wheelhouse,
            env=os.environ,
        )
        if dest is None or (update_versions and update_versions[0].filename == dest.name):
            break
        release_date = release_date_for_wheel_path(dest.path)
        last = NewVersion(filename=dest.path.name, release_date=release_date, found_date=download_time, source=source)
        logging.info("detected %s in %s", last, datetime.now(tz=timezone.utc) - download_time)
        versions.append(last)
        filenames.add(last.filename)
        last_wheel = last.wheel
        last_version = last_wheel.version
        if embed_version is not None and embed_version >= last_wheel.version_tuple:
            break  # stop download if we reach the embed version
    u_log.periodic = periodic
    if not u_log.periodic:
        u_log.started = now
    # update other_versions by removing version we just found
    other_versions = [version for version in other_versions if version.filename not in filenames]
    u_log.versions = versions + update_versions + other_versions
    u_log.completed = datetime.now(tz=timezone.utc)
    embed_update_log.write(u_log.to_dict())
    return versions


def release_date_for_wheel_path(dest):
    wheel = Wheel(dest)
    # the most accurate is to ask PyPi - e.g. https://pypi.org/pypi/pip/json,
    # see https://warehouse.pypa.io/api-reference/json/ for more details
    content = _pypi_get_distribution_info_cached(wheel.distribution)
    if content is not None:
        try:
            upload_time = content["releases"][wheel.version][0]["upload_time"]
            return datetime.strptime(upload_time, "%Y-%m-%dT%H:%M:%S").replace(tzinfo=timezone.utc)
        except Exception as exception:  # noqa: BLE001
            logging.error("could not load release date %s because %r", content, exception)  # noqa: TRY400
    return None


def _request_context():
    yield None
    # fallback to non verified HTTPS (the information we request is not sensitive, so fallback)
    yield ssl._create_unverified_context()  # noqa: S323, SLF001


_PYPI_CACHE = {}


def _pypi_get_distribution_info_cached(distribution):
    if distribution not in _PYPI_CACHE:
        _PYPI_CACHE[distribution] = _pypi_get_distribution_info(distribution)
    return _PYPI_CACHE[distribution]


def _pypi_get_distribution_info(distribution):
    content, url = None, f"https://pypi.org/pypi/{distribution}/json"
    try:
        for context in _request_context():
            try:
                with urlopen(url, context=context) as file_handler:  # noqa: S310
                    content = json.load(file_handler)
                break
            except URLError as exception:
                logging.error("failed to access %s because %r", url, exception)  # noqa: TRY400
    except Exception as exception:  # noqa: BLE001
        logging.error("failed to access %s because %r", url, exception)  # noqa: TRY400
    return content


def manual_upgrade(app_data, env):
    threads = []

    for for_py_version, distribution_to_package in BUNDLE_SUPPORT.items():
        # load extra search dir for the given for_py
        for distribution in distribution_to_package:
            thread = Thread(target=_run_manual_upgrade, args=(app_data, distribution, for_py_version, env))
            thread.start()
            threads.append(thread)

    for thread in threads:
        thread.join()


def _run_manual_upgrade(app_data, distribution, for_py_version, env):
    start = datetime.now(tz=timezone.utc)
    from .bundle import from_bundle  # noqa: PLC0415

    current = from_bundle(
        distribution=distribution,
        version=None,
        for_py_version=for_py_version,
        search_dirs=[],
        app_data=app_data,
        do_periodic_update=False,
        env=env,
    )
    logging.warning(
        "upgrade %s for python %s with current %s",
        distribution,
        for_py_version,
        "" if current is None else current.name,
    )
    versions = do_update(
        distribution=distribution,
        for_py_version=for_py_version,
        embed_filename=current.path,
        app_data=app_data,
        search_dirs=[],
        periodic=False,
    )

    args = [
        distribution,
        for_py_version,
        datetime.now(tz=timezone.utc) - start,
    ]
    if versions:
        args.append("\n".join(f"\t{v}" for v in versions))
    ver_update = "new entries found:\n%s" if versions else "no new versions found"
    msg = f"upgraded %s for python %s in %s {ver_update}"
    logging.warning(msg, *args)


__all__ = [
    "NewVersion",
    "UpdateLog",
    "add_wheel_to_update_log",
    "do_update",
    "dump_datetime",
    "load_datetime",
    "manual_upgrade",
    "periodic_update",
    "release_date_for_wheel_path",
    "trigger_update",
]
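The update log serialises timestamps with DATETIME_FMT; a minimal round-trip check of the two helpers above (assuming nothing beyond the standard library):

from datetime import datetime, timezone

DATETIME_FMT = "%Y-%m-%dT%H:%M:%S.%fZ"

now = datetime.now(tz=timezone.utc)
text = now.strftime(DATETIME_FMT)                                          # dump_datetime
back = datetime.strptime(text, DATETIME_FMT).replace(tzinfo=timezone.utc)  # load_datetime
assert back == now
print(text)  # e.g. 2024-09-10T12:34:56.789012Z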
@@ -0,0 +1,121 @@
from __future__ import annotations

from operator import attrgetter
from zipfile import ZipFile


class Wheel:
    def __init__(self, path) -> None:
        # https://www.python.org/dev/peps/pep-0427/#file-name-convention
        # The wheel filename is {distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}.whl
        self.path = path
        self._parts = path.stem.split("-")

    @classmethod
    def from_path(cls, path):
        if path is not None and path.suffix == ".whl" and len(path.stem.split("-")) >= 5:  # noqa: PLR2004
            return cls(path)
        return None

    @property
    def distribution(self):
        return self._parts[0]

    @property
    def version(self):
        return self._parts[1]

    @property
    def version_tuple(self):
        return self.as_version_tuple(self.version)

    @staticmethod
    def as_version_tuple(version):
        result = []
        for part in version.split(".")[0:3]:
            try:
                result.append(int(part))
            except ValueError:  # noqa: PERF203
                break
        if not result:
            raise ValueError(version)
        return tuple(result)

    @property
    def name(self):
        return self.path.name

    def support_py(self, py_version):
        name = f"{'-'.join(self.path.stem.split('-')[0:2])}.dist-info/METADATA"
        with ZipFile(str(self.path), "r") as zip_file:
            metadata = zip_file.read(name).decode("utf-8")
        marker = "Requires-Python:"
        requires = next((i[len(marker) :] for i in metadata.splitlines() if i.startswith(marker)), None)
        if requires is None:  # if it does not specify a python requires the assumption is compatible
            return True
        py_version_int = tuple(int(i) for i in py_version.split("."))
        for require in (i.strip() for i in requires.split(",")):
            # https://www.python.org/dev/peps/pep-0345/#version-specifiers
            for operator, check in [
                ("!=", lambda v: py_version_int != v),
                ("==", lambda v: py_version_int == v),
                ("<=", lambda v: py_version_int <= v),
                (">=", lambda v: py_version_int >= v),
                ("<", lambda v: py_version_int < v),
                (">", lambda v: py_version_int > v),
            ]:
                if require.startswith(operator):
                    ver_str = require[len(operator) :].strip()
                    version = tuple((int(i) if i != "*" else None) for i in ver_str.split("."))[0:2]
                    if not check(version):
                        return False
                    break
        return True

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.path})"

    def __str__(self) -> str:
        return str(self.path)


def discover_wheels(from_folder, distribution, version, for_py_version):
    wheels = []
    for filename in from_folder.iterdir():
        wheel = Wheel.from_path(filename)
        if (
            wheel
            and wheel.distribution == distribution
            and (version is None or wheel.version == version)
            and wheel.support_py(for_py_version)
        ):
            wheels.append(wheel)
    return sorted(wheels, key=attrgetter("version_tuple", "distribution"), reverse=True)


class Version:
    #: the version bundled with virtualenv
    bundle = "bundle"
    embed = "embed"
    #: custom version handlers
    non_version = (bundle, embed)

    @staticmethod
    def of_version(value):
        return None if value in Version.non_version else value

    @staticmethod
    def as_pip_req(distribution, version):
        return f"{distribution}{Version.as_version_spec(version)}"

    @staticmethod
    def as_version_spec(version):
        of_version = Version.of_version(version)
        return "" if of_version is None else f"=={of_version}"


__all__ = [
    "Version",
    "Wheel",
    "discover_wheels",
]
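A quick look at the PEP 427 filename split the Wheel class relies on, using a hypothetical wheel name:

from pathlib import Path

stem = Path("pip-24.2-py3-none-any.whl").stem
parts = stem.split("-")
print(parts[0])                                        # distribution -> pip
print(parts[1])                                        # version -> 24.2
print(tuple(int(p) for p in parts[1].split(".")[:3]))  # version_tuple -> (24, 2)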