basebuilder_pel7ppc64lebuilder0
2 years ago
41 changed files with 40066 additions and 1359 deletions
@ -1,50 +1,75 @@ |
|||||||
diff -up Python-2.7.3/Makefile.pre.in.no-static-lib Python-2.7.3/Makefile.pre.in |
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 |
||||||
--- Python-2.7.3/Makefile.pre.in.no-static-lib 2013-02-19 14:03:40.801993224 -0500 |
From: David Malcolm <dmalcolm@redhat.com> |
||||||
+++ Python-2.7.3/Makefile.pre.in 2013-02-19 14:04:44.070988898 -0500 |
Date: Mon, 18 Jan 2010 17:59:07 +0000 |
||||||
@@ -397,7 +397,7 @@ coverage: |
Subject: [PATCH] 00111: Don't try to build a libpythonMAJOR.MINOR.a |
||||||
|
MIME-Version: 1.0 |
||||||
|
Content-Type: text/plain; charset=UTF-8 |
||||||
|
Content-Transfer-Encoding: 8bit |
||||||
|
|
||||||
|
Downstream only: not appropriate for upstream. |
||||||
|
|
||||||
|
See https://bugzilla.redhat.com/show_bug.cgi?id=556092 |
||||||
|
|
||||||
|
Co-authored-by: David Malcolm <dmalcolm@redhat.com> |
||||||
|
Co-authored-by: Bohuslav Kabrda <bkabrda@redhat.com> |
||||||
|
Co-authored-by: Matej Stuchlik <mstuchli@redhat.com> |
||||||
|
Co-authored-by: Robert Kuska <rkuska@redhat.com> |
||||||
|
Co-authored-by: Charalampos Stratakis <cstratak@redhat.com> |
||||||
|
Co-authored-by: Miro Hrončok <miro@hroncok.cz> |
||||||
|
--- |
||||||
|
Makefile.pre.in | 21 ++------------------- |
||||||
|
1 file changed, 2 insertions(+), 19 deletions(-) |
||||||
|
|
||||||
|
diff --git a/Makefile.pre.in b/Makefile.pre.in |
||||||
|
index e87a7620c4..381a8ab879 100644 |
||||||
|
--- a/Makefile.pre.in |
||||||
|
+++ b/Makefile.pre.in |
||||||
|
@@ -565,7 +565,7 @@ clinic: check-clean-src $(srcdir)/Modules/_blake2/blake2s_impl.c |
||||||
|
$(PYTHON_FOR_REGEN) $(srcdir)/Tools/clinic/clinic.py --make --srcdir $(srcdir) |
||||||
|
|
||||||
# Build the interpreter |
# Build the interpreter |
||||||
-$(BUILDPYTHON): Modules/python.o $(LIBRARY) $(LDLIBRARY) |
-$(BUILDPYTHON): Programs/python.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY) |
||||||
+$(BUILDPYTHON): Modules/python.o $(LDLIBRARY) |
+$(BUILDPYTHON): Programs/python.o $(LDLIBRARY) $(PY3LIBRARY) |
||||||
$(LINKCC) $(CFLAGS) $(LDFLAGS) $(LINKFORSHARED) -o $@ \ |
$(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/python.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) |
||||||
Modules/python.o \ |
|
||||||
$(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST) |
platform: $(BUILDPYTHON) pybuilddir.txt |
||||||
@@ -413,18 +413,6 @@ sharedmods: $(BUILDPYTHON) |
@@ -613,12 +613,6 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o |
||||||
$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ |
_TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \ |
||||||
$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build |
$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build |
||||||
|
|
||||||
|
- |
||||||
-# Build static library |
-# Build static library |
||||||
-# avoid long command lines, same as LIBRARY_OBJS |
|
||||||
-$(LIBRARY): $(LIBRARY_OBJS) |
-$(LIBRARY): $(LIBRARY_OBJS) |
||||||
- -rm -f $@ |
- -rm -f $@ |
||||||
- $(AR) $(ARFLAGS) $@ Modules/getbuildinfo.o |
- $(AR) $(ARFLAGS) $@ $(LIBRARY_OBJS) |
||||||
- $(AR) $(ARFLAGS) $@ $(PARSER_OBJS) |
|
||||||
- $(AR) $(ARFLAGS) $@ $(OBJECT_OBJS) |
|
||||||
- $(AR) $(ARFLAGS) $@ $(PYTHON_OBJS) |
|
||||||
- $(AR) $(ARFLAGS) $@ $(MODULE_OBJS) $(SIGNAL_OBJS) |
|
||||||
- $(AR) $(ARFLAGS) $@ $(MODOBJS) |
|
||||||
- $(RANLIB) $@ |
|
||||||
- |
- |
||||||
libpython$(VERSION).so: $(LIBRARY_OBJS) |
libpython$(LDVERSION).so: $(LIBRARY_OBJS) $(DTRACE_OBJS) |
||||||
if test $(INSTSONAME) != $(LDLIBRARY); then \ |
if test $(INSTSONAME) != $(LDLIBRARY); then \ |
||||||
$(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \ |
$(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM); \ |
||||||
@@ -1021,18 +1009,6 @@ libainstall: all python-config |
@@ -696,7 +690,7 @@ Makefile Modules/config.c: Makefile.pre \ |
||||||
|
@echo "The Makefile was updated, you may need to re-run make." |
||||||
|
|
||||||
|
|
||||||
|
-Programs/_testembed: Programs/_testembed.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY) |
||||||
|
+Programs/_testembed: Programs/_testembed.o $(LDLIBRARY) $(PY3LIBRARY) |
||||||
|
$(LINKCC) $(PY_CORE_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/_testembed.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) |
||||||
|
|
||||||
|
############################################################################ |
||||||
|
@@ -1579,17 +1573,6 @@ libainstall: @DEF_MAKE_RULE@ python-config |
||||||
else true; \ |
else true; \ |
||||||
fi; \ |
fi; \ |
||||||
done |
done |
||||||
- @if test -d $(LIBRARY); then :; else \ |
- @if test -d $(LIBRARY); then :; else \ |
||||||
- if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \ |
- if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \ |
||||||
- if test "$(SO)" = .dll; then \ |
- if test "$(SHLIB_SUFFIX)" = .dll; then \ |
||||||
- $(INSTALL_DATA) $(LDLIBRARY) $(DESTDIR)$(LIBPL) ; \ |
- $(INSTALL_DATA) $(LDLIBRARY) $(DESTDIR)$(LIBPL) ; \ |
||||||
- else \ |
- else \ |
||||||
- $(INSTALL_DATA) $(LIBRARY) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \ |
- $(INSTALL_DATA) $(LIBRARY) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \ |
||||||
- $(RANLIB) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \ |
|
||||||
- fi; \ |
- fi; \ |
||||||
- else \ |
- else \ |
||||||
- echo Skip install of $(LIBRARY) - use make frameworkinstall; \ |
- echo Skip install of $(LIBRARY) - use make frameworkinstall; \ |
||||||
- fi; \ |
- fi; \ |
||||||
- fi |
- fi |
||||||
$(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c |
$(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c |
||||||
$(INSTALL_DATA) Modules/python.o $(DESTDIR)$(LIBPL)/python.o |
$(INSTALL_DATA) Programs/python.o $(DESTDIR)$(LIBPL)/python.o |
||||||
$(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in |
$(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in |
||||||
|
@ -1,11 +1,7 @@ |
|||||||
Copyright (C) 2004 by Daniel Stenberg et al |
Copyright 2019 pyproject-rpm-macros contributors |
||||||
|
|
||||||
Permission to use, copy, modify, and distribute this software and its |
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: |
||||||
documentation for any purpose and without fee is hereby granted, provided |
|
||||||
that the above copyright notice appear in all copies and that both that |
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. |
||||||
copyright notice and this permission notice appear in supporting |
|
||||||
documentation, and that the name of M.I.T. not be used in advertising or |
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
||||||
publicity pertaining to distribution of the software without specific, |
|
||||||
written prior permission. M.I.T. makes no representations about the |
|
||||||
suitability of this software for any purpose. It is provided "as is" |
|
||||||
without express or implied warranty. |
|
||||||
|
@ -0,0 +1,502 @@ |
|||||||
|
pyproject RPM macros |
||||||
|
==================== |
||||||
|
|
||||||
|
These macros allow projects that follow the Python [packaging specifications] |
||||||
|
to be packaged as RPMs. |
||||||
|
|
||||||
|
They work for: |
||||||
|
|
||||||
|
* traditional Setuptools-based projects that use the `setup.py` file, |
||||||
|
* newer Setuptools-based projects that have a `setup.cfg` file, |
||||||
|
* general Python projects that use the [PEP 517] `pyproject.toml` file (which allows using any build system, such as setuptools, flit or poetry). |
||||||
|
|
||||||
|
These macros replace `%py3_build` and `%py3_install`, which only work with `setup.py`. |
||||||
|
|
||||||
|
[packaging specifications]: https://packaging.python.org/specifications/ |
||||||
|
|
||||||
|
|
||||||
|
Usage |
||||||
|
----- |
||||||
|
|
||||||
|
To use these macros, first BuildRequire the devel package for the Python you |
||||||
|
are building against. In Fedora, that's `python3-devel`. |
||||||
|
|
||||||
|
BuildRequires: python3-devel |
||||||
|
|
||||||
|
The macros will be pulled in as a dependency on Fedora and EPEL 9+. |
||||||
|
In other distributions you need to BuildRequire the macros as well: |
||||||
|
|
||||||
|
BuildRequires: python3-devel |
||||||
|
BuildRequires: pyproject-rpm-macros |
||||||
|
|
||||||
|
Next, you need to generate more build dependencies (of your projects and |
||||||
|
the macros themselves) by running `%pyproject_buildrequires` in the |
||||||
|
`%generate_buildrequires` section: |
||||||
|
|
||||||
|
%generate_buildrequires |
||||||
|
%pyproject_buildrequires |
||||||
|
|
||||||
|
This will add build dependencies according to [PEP 517] and [PEP 518]. |
||||||
|
This also adds run-time dependencies by default and |
||||||
|
can add test-time dependencies, see the section below. |
||||||
|
If you need more dependencies, such as non-Python libraries, BuildRequire |
||||||
|
them manually. |
||||||
|
|
||||||
|
Note that `%generate_buildrequires` may produce error messages `(exit 11)` in |
||||||
|
the build log. This is expected behavior of BuildRequires generators; see |
||||||
|
[the Fedora change] for details. |
||||||
|
|
||||||
|
[the Fedora change]: https://fedoraproject.org/wiki/Changes/DynamicBuildRequires |
||||||
|
|
||||||
|
Then, build a wheel in `%build` with `%pyproject_wheel`: |
||||||
|
|
||||||
|
%build |
||||||
|
%pyproject_wheel |
||||||
|
|
||||||
|
And install the wheel in `%install` with `%pyproject_install`: |
||||||
|
|
||||||
|
%install |
||||||
|
%pyproject_install |
||||||
|
|
||||||
|
`%pyproject_install` installs all wheels in `pyproject-wheeldir/` located in the root of the source tree. |
||||||
|
|
||||||
|
|
||||||
|
Adding run-time and test-time dependencies |
||||||
|
------------------------------------------ |
||||||
|
|
||||||
|
To run tests or import checks in the `%check` section, |
||||||
|
the package's runtime dependencies need to also be included as build requirements. |
||||||
|
|
||||||
|
Hence, `%pyproject_buildrequires` also generates runtime dependencies by default. |
||||||
|
|
||||||
|
For this to work, the project's build system must support the [prepare-metadata-for-build-wheel hook]. |
||||||
|
The popular buildsystems (setuptools, flit, poetry) do support it. |
||||||
|
|
||||||
|
This behavior can be disabled |
||||||
|
(e.g. when the project's build system does not support it) |
||||||
|
using the `-R` flag: |
||||||
|
|
||||||
|
%generate_buildrequires |
||||||
|
%pyproject_buildrequires -R |
||||||
|
|
||||||
|
Alternatively, the runtime dependencies can be obtained by building the wheel and reading the metadata from the built wheel. |
||||||
|
This can be enabled by using the `-w` flag. |
||||||
|
Support for building wheels with `%pyproject_buildrequires -w` is **provisional** and the behavior might change. |
||||||
|
Please subscribe to Fedora's [python-devel list] if you use the option. |
||||||
|
|
||||||
|
%generate_buildrequires |
||||||
|
%pyproject_buildrequires -w |
||||||
|
|
||||||
|
When this is used, the wheel is going to be built at least twice, |
||||||
|
because the `%generate_buildrequires` section runs repeatedly. |
||||||
|
To avoid accidentally reusing a wheel leaking from a previous (different) build, |
||||||
|
it cannot be reused between `%generate_buildrequires` rounds. |
||||||
|
Contrarily to that, rebuilding the wheel again in the `%build` section is redundant |
||||||
|
and the packager can omit the `%build` section entirely |
||||||
|
to reuse the wheel built from the last round of `%generate_buildrequires`. |
||||||
|
Be extra careful when attempting to modify the sources after `%pyproject_buildrequires`, |
||||||
|
e.g. when running extra commands in the `%build` section: |
||||||
|
|
||||||
|
%build |
||||||
|
cython src/wrong.pyx # this is too late with %%pyproject_buildrequires -w |
||||||
|
%pyproject_wheel |
||||||
|
|
||||||
|
For projects that specify test requirements using an [`extra` |
||||||
|
provide](https://packaging.python.org/specifications/core-metadata/#provides-extra-multiple-use), |
||||||
|
these can be added using the `-x` flag. |
||||||
|
Multiple extras can be supplied by repeating the flag or as a comma separated list. |
||||||
|
For example, if upstream suggests installing test dependencies with |
||||||
|
`pip install mypackage[testing]`, the test deps would be generated by: |
||||||
|
|
||||||
|
%generate_buildrequires |
||||||
|
%pyproject_buildrequires -x testing |
||||||
|
|
||||||
|
For projects that specify test requirements in their [tox] configuration, |
||||||
|
these can be added using the `-t` flag (default tox environment) |
||||||
|
or the `-e` flag followed by the tox environment. |
||||||
|
The default tox environment (such as `py37` assuming the Fedora's Python version is 3.7) |
||||||
|
is available in the `%{toxenv}` macro. |
||||||
|
For example, if upstream suggests running the tests on Python 3.7 with `tox -e py37`, |
||||||
|
the test deps would be generated by: |
||||||
|
|
||||||
|
%generate_buildrequires |
||||||
|
%pyproject_buildrequires -t |
||||||
|
|
||||||
|
If upstream uses a custom derived environment, such as `py37-unit`, use: |
||||||
|
|
||||||
|
%pyproject_buildrequires -e %{toxenv}-unit |
||||||
|
|
||||||
|
Or specify more environments if needed: |
||||||
|
|
||||||
|
%pyproject_buildrequires -e %{toxenv}-unit,%{toxenv}-integration |
||||||
|
|
||||||
|
The `-e` option redefines `%{toxenv}` for further reuse. |
||||||
|
Use `%{default_toxenv}` to get the default value. |
||||||
|
|
||||||
|
The `-t`/`-e` option uses [tox-current-env]'s `--print-deps-to-file` behind the scenes. |
||||||
|
|
||||||
|
If your package specifies some tox plugins in `tox.requires`, |
||||||
|
such plugins will be BuildRequired as well. |
||||||
|
Not all plugins are guaranteed to play well with [tox-current-env], |
||||||
|
in worst case, patch/sed the requirement out from the tox configuration. |
||||||
|
|
||||||
|
Note that both `-x` and `-t` imply `-r`, |
||||||
|
because runtime dependencies are always required for testing. |
||||||
|
You can only use those options if the build backend supports the [prepare-metadata-for-build-wheel hook], |
||||||
|
or together with `-w`. |
||||||
|
|
||||||
|
[tox]: https://tox.readthedocs.io/ |
||||||
|
[tox-current-env]: https://github.com/fedora-python/tox-current-env/ |
||||||
|
[prepare-metadata-for-build-wheel hook]: https://www.python.org/dev/peps/pep-0517/#prepare-metadata-for-build-wheel |
||||||
|
|
||||||
|
Additionally to generated requirements you can supply multiple file names to `%pyproject_buildrequires` macro. |
||||||
|
Dependencies will be loaded from them: |
||||||
|
|
||||||
|
%pyproject_buildrequires -r requirements/tests.in requirements/docs.in requirements/dev.in |
||||||
|
|
||||||
|
For packages not using a build system you can use `-N` to entirely skip automatic |
||||||
|
generation of requirements and install requirements only from manually specified files. |
||||||
|
`-N` option cannot be used in combination with other options mentioned above |
||||||
|
(`-r`, `-w`, `-e`, `-t`, `-x`). |
||||||
|
|
||||||
|
Running tox based tests |
||||||
|
----------------------- |
||||||
|
|
||||||
|
In case you want to run the tests as specified in [tox] configuration, |
||||||
|
you must use `%pyproject_buildrequires` with `-t` or `-e` as explained above. |
||||||
|
Then, use the `%tox` macro in `%check`: |
||||||
|
|
||||||
|
%check |
||||||
|
%tox |
||||||
|
|
||||||
|
The macro: |
||||||
|
|
||||||
|
- Always prepends `$PATH` with `%{buildroot}%{_bindir}` |
||||||
|
- If not defined, sets `$PYTHONPATH` to `%{buildroot}%{python3_sitearch}:%{buildroot}%{python3_sitelib}` |
||||||
|
- If not defined, sets `$TOX_TESTENV_PASSENV` to `*` |
||||||
|
- Runs `tox` with `-q` (quiet), `--recreate` and `--current-env` (from [tox-current-env]) flags |
||||||
|
- Implicitly uses the tox environment name stored in `%{toxenv}` - as overridden by `%pyproject_buildrequires -e` |
||||||
|
|
||||||
|
By using the `-e` flag, you can use a different tox environment(s): |
||||||
|
|
||||||
|
%check |
||||||
|
%tox |
||||||
|
%if %{with integration_test} |
||||||
|
%tox -e %{default_toxenv}-integration |
||||||
|
%endif |
||||||
|
|
||||||
|
If you wish to provide custom `tox` flags or arguments, add them after `--`: |
||||||
|
|
||||||
|
%tox -- --flag-for-tox |
||||||
|
|
||||||
|
If you wish to pass custom `posargs` to tox, use another `--`: |
||||||
|
|
||||||
|
%tox -- --flag-for-tox -- --flag-for-posargs |
||||||
|
|
||||||
|
Or (note the two sequential `--`s): |
||||||
|
|
||||||
|
%tox -- -- --flag-for-posargs |
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Generating the %files section |
||||||
|
----------------------------- |
||||||
|
|
||||||
|
To generate the list of files in the `%files` section, you can use `%pyproject_save_files` after the `%pyproject_install` macro. |
||||||
|
It takes toplevel module names (i.e. the names used with `import` in Python) and stores paths for those modules and metadata for the package (dist-info directory) to a file stored at `%{pyproject_files}`. |
||||||
|
For example, if a package provides the modules `requests` and `_requests`, write: |
||||||
|
|
||||||
|
%install |
||||||
|
%pyproject_install |
||||||
|
%pyproject_save_files requests _requests |
||||||
|
|
||||||
|
To add listed files to the `%files` section, use `%files -f %{pyproject_files}`. |
||||||
|
Note that you still need to add any documentation manually (for now). |
||||||
|
|
||||||
|
%files -n python3-requests -f %{pyproject_files} |
||||||
|
%doc README.rst |
||||||
|
|
||||||
|
You can use globs in the module names if listing them explicitly would be too tedious: |
||||||
|
|
||||||
|
%install |
||||||
|
%pyproject_install |
||||||
|
%pyproject_save_files '*requests' |
||||||
|
|
||||||
|
In fully automated environments, you can use the `*` glob to include all modules (put it in single quotes to prevent Shell from expanding it). In Fedora however, you should always use a more specific glob to avoid accidentally packaging unwanted files (for example, a top level module named `test`). |
||||||
|
|
||||||
|
Speaking about automated environments, some files cannot be classified with `%pyproject_save_files`, but it is possible to list all unclassified files by adding a special `+auto` argument. |
||||||
|
|
||||||
|
%install |
||||||
|
%pyproject_install |
||||||
|
%pyproject_save_files '*' +auto |
||||||
|
|
||||||
|
%files -n python3-requests -f %{pyproject_files} |
||||||
|
|
||||||
|
However, in Fedora packages, always list executables explicitly to avoid unintended collisions with other packages or accidental missing executables: |
||||||
|
|
||||||
|
%install |
||||||
|
%pyproject_install |
||||||
|
%pyproject_save_files requests _requests |
||||||
|
|
||||||
|
%files -n python3-requests -f %{pyproject_files} |
||||||
|
%doc README.rst |
||||||
|
%{_bindir}/downloader |
||||||
|
|
||||||
|
`%pyproject_save_files` can automatically mark license files with `%license` macro |
||||||
|
and language (`*.mo`) files with `%lang` macro and appropriate language code. |
||||||
|
Only license files declared via [PEP 639] `License-Field` field are detected. |
||||||
|
[PEP 639] is still a draft and can be changed in the future. |
||||||
|
|
||||||
|
Note that `%pyproject_save_files` uses data from the [RECORD file](https://www.python.org/dev/peps/pep-0627/). |
||||||
|
If you wish to rename, remove or otherwise change the installed files of a package |
||||||
|
*after* `%pyproject_install`, `%pyproject_save_files` might break. |
||||||
|
If possible, remove/rename such files in `%prep`. |
||||||
|
If not possible, avoid using `%pyproject_save_files` or edit/replace `%{pyproject_files}`. |
||||||
|
|
||||||
|
|
||||||
|
Performing an import check on all importable modules |
||||||
|
---------------------------------------------------- |
||||||
|
|
||||||
|
If the upstream test suite cannot be used during the package build |
||||||
|
and you use `%pyproject_save_files`, |
||||||
|
you can benefit from the `%pyproject_check_import` macro. |
||||||
|
If `%pyproject_save_files` is not used, calling `%pyproject_check_import` will fail. |
||||||
|
|
||||||
|
When `%pyproject_save_files` is invoked, |
||||||
|
it creates a list of all valid and public (i.e. not starting with `_`) |
||||||
|
importable module names found in the package. |
||||||
|
This list is then usable by `%pyproject_check_import` which performs an import check for each listed module. |
||||||
|
When a module fails to import, the build fails. |
||||||
|
|
||||||
|
The modules are imported from both installed and buildroot's `%{python3_sitearch}` |
||||||
|
and `%{python3_sitelib}`, not from the current directory. |
||||||
|
|
||||||
|
Use the macro in `%check`: |
||||||
|
|
||||||
|
%check |
||||||
|
%pyproject_check_import |
||||||
|
|
||||||
|
By using the `-e` flag, you can exclude module names matching the given glob(s) from the import check |
||||||
|
(put it in single quotes to prevent Shell from expanding it). |
||||||
|
The flag can be used repeatedly. |
||||||
|
For example, to exclude all submodules ending with `config` and all submodules starting with `test`, you can use: |
||||||
|
|
||||||
|
%pyproject_check_import -e '*.config' -e '*.test*' |
||||||
|
|
||||||
|
There must be at least one module left for the import check; |
||||||
|
if, as a result of greedy excluding, no modules are left to check, the check fails. |
||||||
|
|
||||||
|
When the `-t` flag is used, only top-level modules are checked, |
||||||
|
qualified module names with a dot (`.`) are excluded. |
||||||
|
If the modules detected by `%pyproject_save_files` are `requests`, `requests.models`, and `requests.packages`, this will only perform an import of `requests`: |
||||||
|
|
||||||
|
%pyproject_check_import -t |
||||||
|
|
||||||
|
The modifying flags should only be used when there is a valid reason for not checking all available modules. |
||||||
|
The reason should be documented in a comment. |
||||||
|
|
||||||
|
The `%pyproject_check_import` macro also accepts positional arguments with |
||||||
|
additional qualified module names to check, useful for example if some modules are installed manually. |
||||||
|
Note that filtering by `-t`/`-e` also applies to the positional arguments. |
||||||
|
|
||||||
|
|
||||||
|
Generating Extras subpackages |
||||||
|
----------------------------- |
||||||
|
|
||||||
|
The `%pyproject_extras_subpkg` macro generates simple subpackage(s) |
||||||
|
for Python extras. |
||||||
|
|
||||||
|
The macro should be placed after the base package's `%description` to avoid |
||||||
|
issues in building the SRPM. |
||||||
|
|
||||||
|
For example, if the `requests` project's metadata defines the extras |
||||||
|
`security` and `socks`, the following invocation will generate the subpackage |
||||||
|
`python3-requests+security` that provides `python3dist(requests[security])`, |
||||||
|
and a similar one for `socks`. |
||||||
|
|
||||||
|
%pyproject_extras_subpkg -n python3-requests security socks |
||||||
|
|
||||||
|
The macro works like `%python_extras_subpkg`, |
||||||
|
except the `-i`/`-f`/`-F` arguments are optional and discouraged. |
||||||
|
A filelist written by `%pyproject_install` is used by default. |
||||||
|
For more information on `%python_extras_subpkg`, see the [Fedora change]. |
||||||
|
|
||||||
|
[Fedora change]: https://fedoraproject.org/wiki/Changes/PythonExtras |
||||||
|
|
||||||
|
These arguments are still required: |
||||||
|
|
||||||
|
* -n: name of the “base” package (e.g. python3-requests) |
||||||
|
* Positional arguments: the extra name(s). |
||||||
|
Multiple subpackages are generated when multiple names are provided. |
||||||
|
|
||||||
|
|
||||||
|
PROVISIONAL: Importing just-built (extension) modules in %build |
||||||
|
--------------------------------------------------------------- |
||||||
|
|
||||||
|
Sometimes, it is desired to be able to import the *just-built* extension modules |
||||||
|
in the `%build` section, e.g. to build the documentation with Sphinx. |
||||||
|
|
||||||
|
%build |
||||||
|
%pyproject_wheel |
||||||
|
... build the docs here ... |
||||||
|
|
||||||
|
With pure Python packages, it might be possible to set `PYTHONPATH=${PWD}` or `PYTHONPATH=${PWD}/src`. |
||||||
|
However, it is a bit more complicated with extension modules. |
||||||
|
|
||||||
|
The location of just-built modules might differ depending on Python version, architecture, pip version, etc. |
||||||
|
Hence, the macro `%{pyproject_build_lib}` exists to be used like this: |
||||||
|
|
||||||
|
%build |
||||||
|
%pyproject_wheel |
||||||
|
PYTHONPATH=%{pyproject_build_lib} ... build the docs here ... |
||||||
|
|
||||||
|
This macro is currently **provisional** and the behavior might change. |
||||||
|
Please subscribe to Fedora's [python-devel list] if you use the macro. |
||||||
|
|
||||||
|
The `%{pyproject_build_lib}` macro expands to a Shell `$(...)` expression and does not work when put into single quotes (`'`). |
||||||
|
|
||||||
|
Depending on the pip version, the expanded value will differ: |
||||||
|
|
||||||
|
[python-devel list]: https://lists.fedoraproject.org/archives/list/python-devel@lists.fedoraproject.org/ |
||||||
|
|
||||||
|
### New pip 21.3+ with in-tree-build and setuptools 62.1+ (Fedora 37+) |
||||||
|
|
||||||
|
Always use the macro from the same directory where you called `%pyproject_wheel` from. |
||||||
|
The value will expand to something like: |
||||||
|
|
||||||
|
* `/builddir/build/BUILD/%{name}-%{version}/build/lib.linux-x86_64-cpython-311` for wheels with extension modules |
||||||
|
* `/builddir/build/BUILD/%{name}-%{version}/build/lib` for pure Python wheels |
||||||
|
|
||||||
|
If multiple wheels were built from the same directory, |
||||||
|
some pure Python and some with extension modules, |
||||||
|
the expanded value will be combined with `:`: |
||||||
|
|
||||||
|
* `/builddir/build/BUILD/%{name}-%{version}/build/lib.linux-x86_64-cpython-311:/builddir/build/BUILD/%{name}-%{version}/build/lib` |
||||||
|
|
||||||
|
If multiple wheels were built from different directories, |
||||||
|
the value will differ depending on the current directory. |
||||||
|
|
||||||
|
|
||||||
|
### New pip 21.3+ with in-tree-build and older setuptools (Fedora 36) |
||||||
|
|
||||||
|
Always use the macro from the same directory where you called `%pyproject_wheel` from. |
||||||
|
The value will expand to something like: |
||||||
|
|
||||||
|
* `/builddir/build/BUILD/%{name}-%{version}/build/lib.linux-x86_64-3.10` for wheels with extension modules |
||||||
|
* `/builddir/build/BUILD/%{name}-%{version}/build/lib` for pure Python wheels |
||||||
|
|
||||||
|
If multiple wheels were built from the same directory, |
||||||
|
some pure Python and some with extension modules, |
||||||
|
the expanded value will be combined with `:`: |
||||||
|
|
||||||
|
* `/builddir/build/BUILD/%{name}-%{version}/build/lib.linux-x86_64-3.10:/builddir/build/BUILD/%{name}-%{version}/build/lib` |
||||||
|
|
||||||
|
If multiple wheels were built from different directories, |
||||||
|
the value will differ depending on the current directory. |
||||||
|
|
||||||
|
|
||||||
|
### Older pip with out-of-tree-build (Fedora 35 and EL 9) |
||||||
|
|
||||||
|
The value will expand to something like: |
||||||
|
|
||||||
|
* `/builddir/build/BUILD/%{name}-%{version}/.pyproject-builddir/pip-req-build-xxxxxxxx/build/lib.linux-x86_64-3.10` for wheels with extension modules |
||||||
|
* `/builddir/build/BUILD/%{name}-%{version}/.pyproject-builddir/pip-req-build-xxxxxxxx/build/lib` for pure Python wheels |
||||||
|
|
||||||
|
Note that the exact value is **not stable** between builds |
||||||
|
(the `xxxxxxxx` part is randomly generated, |
||||||
|
nor should you consider the `.pyproject-builddir` directory to remain stable). |
||||||
|
|
||||||
|
If multiple wheels are built, |
||||||
|
the expanded value will always be combined with `:` regardless of the current directory, e.g.: |
||||||
|
|
||||||
|
* `/builddir/build/BUILD/%{name}-%{version}/.pyproject-builddir/pip-req-build-xxxxxxxx/build/lib.linux-x86_64-3.10:/builddir/build/BUILD/%{name}-%{version}/.pyproject-builddir/pip-req-build-yyyyyyyy/build/lib.linux-x86_64-3.10:/builddir/build/BUILD/%{name}-%{version}/.pyproject-builddir/pip-req-build-zzzzzzzz/build/lib` |
||||||
|
|
||||||
|
**Note:** If you manage to build some wheels with in-tree-build and some with out-of-tree-build option, |
||||||
|
the expanded value will contain all relevant directories. |
||||||
|
|
||||||
|
|
||||||
|
Limitations |
||||||
|
----------- |
||||||
|
|
||||||
|
`%pyproject_install` changes shebang lines of every Python script in `%{buildroot}%{_bindir}` to `#!%{__python3} %{py3_shbang_opt}` (`#!/usr/bin/python3 -s`). |
||||||
|
Existing Python flags in shebangs are preserved. |
||||||
|
For example `#!/usr/bin/python3 -Ru` will be updated to `#!/usr/bin/python3 -sRu`. |
||||||
|
Sometimes, this can interfere with tests that run such scripts directly by name, |
||||||
|
because in tests we usually rely on `PYTHONPATH` (and `-s` ignores that). |
||||||
|
Would this behavior be undesired for any reason, |
||||||
|
undefine `%{py3_shbang_opt}` to turn it off. |
||||||
|
|
||||||
|
Some valid Python version specifiers are not supported. |
||||||
|
|
||||||
|
When a dependency is specified via a URL or local path, for example as: |
||||||
|
|
||||||
|
https://github.com/ActiveState/appdirs/archive/8eacfa312d77aba28d483fbfb6f6fc54099622be.zip |
||||||
|
/some/path/foo-1.2.3.tar.gz |
||||||
|
git+https://github.com/sphinx-doc/sphinx.git@96dbe5e3 |
||||||
|
|
||||||
|
The `%pyproject_buildrequires` macro is unable to convert it to an appropriate RPM requirement and will fail. |
||||||
|
If the URL contains the `packageName @` prefix as specified in [PEP 508], |
||||||
|
the requirement will be generated without a version constraint: |
||||||
|
|
||||||
|
appdirs@https://github.com/ActiveState/appdirs/archive/8eacfa312d77aba28d483fbfb6f6fc54099622be.zip |
||||||
|
foo@file:///some/path/foo-1.2.3.tar.gz |
||||||
|
|
||||||
|
Will be converted to: |
||||||
|
|
||||||
|
python3dist(appdirs) |
||||||
|
python3dist(foo) |
||||||
|
|
||||||
|
Alternatively, when an URL requirement parsed from a text file |
||||||
|
given as positional argument to `%pyproject_buildrequires` |
||||||
|
contains the `#egg=packageName` fragment, |
||||||
|
as documented in [pip's documentation]: |
||||||
|
|
||||||
|
git+https://github.com/sphinx-doc/sphinx.git@96dbe5e3#egg=sphinx |
||||||
|
|
||||||
|
The requirements will be converted to package names without versions, e.g.: |
||||||
|
|
||||||
|
python3dist(sphinx) |
||||||
|
|
||||||
|
However upstreams usually only use direct URLs for their requirements as workarounds, |
||||||
|
so be prepared for problems. |
||||||
|
|
||||||
|
[PEP 508]: https://www.python.org/dev/peps/pep-0508/ |
||||||
|
[PEP 517]: https://www.python.org/dev/peps/pep-0517/ |
||||||
|
[PEP 518]: https://www.python.org/dev/peps/pep-0518/ |
||||||
|
[PEP 639]: https://www.python.org/dev/peps/pep-0639/ |
||||||
|
[pip's documentation]: https://pip.pypa.io/en/stable/cli/pip_install/#vcs-support |
||||||
|
|
||||||
|
|
||||||
|
Testing the macros |
||||||
|
------------------ |
||||||
|
|
||||||
|
This repository has two kinds of tests. |
||||||
|
First, there is RPM `%check` section, run when building the `python-rpm-macros` |
||||||
|
package. |
||||||
|
|
||||||
|
Then there are CI tests. |
||||||
|
There is currently [no way to run Fedora CI tests locally][ci-rfe], |
||||||
|
but you can do what the tests do manually using mock. |
||||||
|
For each `$PKG.spec` in `tests/`: |
||||||
|
|
||||||
|
- clean your mock environment: |
||||||
|
|
||||||
|
mock -r fedora-rawhide-x86_64 clean |
||||||
|
|
||||||
|
- install the version of `python-rpm-macros` you're testing, e.g.: |
||||||
|
|
||||||
|
mock -r fedora-rawhide-x86_64 install .../python-rpm-macros-*.noarch.rpm |
||||||
|
|
||||||
|
- download the sources: |
||||||
|
|
||||||
|
spectool -g -R $PKG.spec |
||||||
|
|
||||||
|
- build a SRPM: |
||||||
|
|
||||||
|
rpmbuild -bs $PKG.spec |
||||||
|
|
||||||
|
- build in mock, using the path from the command above as `$SRPM`: |
||||||
|
|
||||||
|
mock -r fedora-rawhide-x86_64 -n -N $SRPM |
||||||
|
|
||||||
|
[ci-rfe]: https://pagure.io/fedora-ci/general/issue/4 |
@ -0,0 +1,55 @@ |
|||||||
|
"""Checks if all *.pyc files have later mtime than their *.py files.""" |
||||||
|
|
||||||
|
import os |
||||||
|
import sys |
||||||
|
from importlib.util import cache_from_source |
||||||
|
from pathlib import Path |
||||||
|
|
||||||
|
|
||||||
|
# Buildroot prefix stripped from paths before matching the glob list below.
RPM_BUILD_ROOT = os.environ.get('RPM_BUILD_ROOT', '')

# Bytecode cache variants checked for every source file:
#   ...cpython-3X.pyc        -> optimization None
#   ...cpython-3X.opt-1.pyc  -> optimization 1
#   ...cpython-3X.opt-2.pyc  -> optimization 2
LEVELS = (None, 1, 2)

# Globs of test and other files that are expected to ship without bytecode.
not_compiled = [
    '/usr/bin/*',
    '*/test/bad_coding.py',
    '*/test/bad_coding2.py',
    '*/test/badsyntax_*.py',
    '*/lib2to3/tests/data/bom.py',
    '*/lib2to3/tests/data/crlf.py',
    '*/lib2to3/tests/data/different_encoding.py',
    '*/lib2to3/tests/data/false_encoding.py',
    '*/lib2to3/tests/data/py2_test_grammar.py',
    '*.debug-gdb.py',
]


def bytecode_expected(path):
    """Return True unless *path* (buildroot prefix removed) matches one of
    the not_compiled globs, i.e. unless it is a file we never bytecompile."""
    relative = Path(path[len(RPM_BUILD_ROOT):])
    return not any(relative.match(pattern) for pattern in not_compiled)
||||||
|
|
||||||
|
|
||||||
|
# Walk every path given on the command line (skipping files that are not
# supposed to be compiled) and verify that each bytecode cache variant is
# at least as new as its source; report and count every stale .pyc found.
failed = 0
for source in (p for p in sys.argv[1:] if bytecode_expected(p)):
    source_mtime = os.path.getmtime(source)
    for level in LEVELS:
        pyc = cache_from_source(source, optimization=level)
        if os.path.getmtime(pyc) < source_mtime:
            print('Failed bytecompilation timestamps check: '
                  f'Bytecode file {pyc} is older than source file {source}',
                  file=sys.stderr)
            failed += 1

if failed:
    print(f'\n{failed} files failed bytecompilation timestamps check.',
          file=sys.stderr)
    sys.exit(1)
@ -0,0 +1,83 @@ |
|||||||
|
'''Check whether the manpage extensions and directories list hardcoded in brp-compress |
||||||
|
are the same as the lists stored in pyproject_save_files.py. |
||||||
|
There is an open issue for RPM to provide them both as macros: |
||||||
|
https://github.com/rpm-software-management/rpm/issues/1865 |
||||||
|
Once that happens, this script can be removed. |
||||||
|
''' |
||||||
|
|
||||||
|
import argparse |
||||||
|
import re |
||||||
|
import sys |
||||||
|
|
||||||
|
from pathlib import PosixPath |
||||||
|
|
||||||
|
from pyproject_buildrequires import print_err |
||||||
|
from pyproject_save_files import prepend_mandirs, MANPAGE_EXTENSIONS |
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def read_brp_compress(filename):
    """Parse the brp-compress script for manpage extensions and man dirs.

    Returns a 2-tuple: (sorted list of manpage extensions, sorted list of
    man directories with the leading ``.${PREFIX}`` normalized to ``/PREFIX``).
    """
    contents = filename.read_text()

    # The extensions appear several times in brp-compress; gather them into
    # a set first so duplicates collapse, then sort for stable comparison.
    manpage_exts = sorted(set(re.findall(r'\(?(\w+)\\+\)?\$?', contents)))

    # Get rid of ${PREFIX} when extracting the manpage directories
    mandirs = sorted(
        token.replace('.${PREFIX}', '/PREFIX')
        for token in contents.split()
        if token.startswith('.${PREFIX}')
    )

    return manpage_exts, mandirs
||||||
|
|
||||||
|
|
||||||
|
def compare_mandirs(brp_compress_mandirs):
    '''
    Check whether each of brp-compress mandirs entry is present in the list
    stored in pyproject_save_files.py
    '''
    # prepend_mandirs() builds the list with the given prefix; '/PREFIX' is
    # the placeholder read_brp_compress() normalizes to.
    expected = sorted(prepend_mandirs(prefix='/PREFIX'))
    if brp_compress_mandirs != expected:
        print_err('Mandir lists don\'t match, update the list in pyproject_save_files.py')
        print_err('brp-compress list:', brp_compress_mandirs)
        print_err('pyproject_save_files list:', expected)
        return False
    return True
||||||
|
|
||||||
|
|
||||||
|
def compare_manpage_extensions(brp_compress_manpage_exts):
    '''
    Check whether each of brp-compress manpage extension is present in the list
    stored in pyproject_save_files.py
    '''
    expected = sorted(MANPAGE_EXTENSIONS)
    if brp_compress_manpage_exts != expected:
        print_err('Manpage extension lists don\'t match, update the list in pyproject_save_files.py')
        print_err('brp-compress list:', brp_compress_manpage_exts)
        print_err('pyproject_save_files list:', expected)
        return False
    return True
||||||
|
|
||||||
|
|
||||||
|
def main(args):
    """Compare brp-compress data with pyproject_save_files; exit 0 on match,
    1 on any mismatch."""
    src_manpage_exts, src_mandirs = read_brp_compress(args.filename)
    # Run both comparisons unconditionally so every mismatch is reported.
    extensions_ok = compare_manpage_extensions(src_manpage_exts)
    mandirs_ok = compare_mandirs(src_mandirs)
    sys.exit(0 if extensions_ok and mandirs_ok else 1)
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # -f/--filename: path to the brp-compress script to inspect (required).
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('-f', '--filename', type=PosixPath, required=True,
                            help='Provide location of brp-compress file')
    main(arg_parser.parse_args())
@ -0,0 +1,36 @@ |
|||||||
|
From cacd6d2fa9a27b29415a4ce25d76406fe69fc398 Mon Sep 17 00:00:00 2001 |
||||||
|
From: Karolina Surma <ksurma@redhat.com> |
||||||
|
Date: Mon, 10 May 2021 16:38:50 +0200 |
||||||
|
Subject: [PATCH] Dummy certifi patch |
||||||
|
|
||||||
|
Co-Authored-By: Tomas Hrnciar <thrnciar@redhat.com> |
||||||
|
--- |
||||||
|
src/pip/_vendor/certifi/core.py | 5 ++--- |
||||||
|
1 file changed, 2 insertions(+), 3 deletions(-) |
||||||
|
|
||||||
|
diff --git a/src/pip/_vendor/certifi/core.py b/src/pip/_vendor/certifi/core.py |
||||||
|
index f34045b..a2ada08 100644 |
||||||
|
--- a/src/pip/_vendor/certifi/core.py |
||||||
|
+++ b/src/pip/_vendor/certifi/core.py |
||||||
|
@@ -14,6 +14,7 @@ class _PipPatchedCertificate(Exception): |
||||||
|
|
||||||
|
|
||||||
|
try: |
||||||
|
+ raise ImportError # force fallback |
||||||
|
# Return a certificate file on disk for a standalone pip zipapp running in |
||||||
|
# an isolated build environment to use. Passing --cert to the standalone |
||||||
|
# pip does not work since requests calls where() unconditionally on import. |
||||||
|
@@ -75,9 +76,7 @@ except ImportError: |
||||||
|
# If we don't have importlib.resources, then we will just do the old logic |
||||||
|
# of assuming we're on the filesystem and munge the path directly. |
||||||
|
def where() -> str: |
||||||
|
- f = os.path.dirname(__file__) |
||||||
|
- |
||||||
|
- return os.path.join(f, "cacert.pem") |
||||||
|
+ return '/etc/pki/tls/certs/ca-bundle.crt' |
||||||
|
|
||||||
|
|
||||||
|
def contents() -> str: |
||||||
|
-- |
||||||
|
2.35.3 |
||||||
|
|
@ -0,0 +1,185 @@ |
|||||||
|
# This is a directory where wheels are stored and installed from, absolute
%_pyproject_wheeldir %{_builddir}%{?buildsubdir:/%{buildsubdir}}/pyproject-wheeldir

# This is a directory used as TMPDIR, where pip copies sources to and builds from, relative to PWD
# For proper debugsource packages, we create TMPDIR within PWD
# See https://github.com/pypa/pip/issues/7555#issuecomment-595180864
#
# This will be used in debugsource package paths (applies to extension modules only)
# NB: pytest collects tests from here if not hidden
# https://docs.pytest.org/en/latest/reference.html#confval-norecursedirs
%_pyproject_builddir %{_builddir}%{?buildsubdir:/%{buildsubdir}}/.pyproject-builddir

# We prefix all created files with this value to make them unique
# Ideally, we would put them into %%{buildsubdir}, but that value changes during the spec
# The used value is similar to the one used to define the default %%buildroot
%_pyproject_files_prefix %{name}-%{version}-%{release}.%{_arch}

# Intermediate files shared between the pyproject macros:
#   %%pyproject_files            - %%files list written by %%pyproject_save_files
#   %%_pyproject_modules         - module names consumed by %%pyproject_check_import
#   %%_pyproject_ghost_distinfo  - %%ghost dist-info entries written by %%pyproject_install
#   %%_pyproject_record          - preprocessed RECORD data written by %%pyproject_install
%pyproject_files %{_builddir}/%{_pyproject_files_prefix}-pyproject-files
%_pyproject_modules %{_builddir}/%{_pyproject_files_prefix}-pyproject-modules
%_pyproject_ghost_distinfo %{_builddir}/%{_pyproject_files_prefix}-pyproject-ghost-distinfo
%_pyproject_record %{_builddir}/%{_pyproject_files_prefix}-pyproject-record

# Avoid leaking %%{_pyproject_builddir} to pytest collection
# https://bugzilla.redhat.com/show_bug.cgi?id=1935212
# The value is read and used by the %%pytest and %%tox macros:
%_set_pytest_addopts %global __pytest_addopts --ignore=%{_pyproject_builddir}
||||||
|
|
||||||
|
# Build wheel(s) from the current source tree into %%{_pyproject_wheeldir}.
# Build flags default to the RPM ones and TMPDIR is kept inside the build
# directory so debugsource paths stay reproducible.
%pyproject_wheel() %{expand:\\\
%_set_pytest_addopts
mkdir -p "%{_pyproject_builddir}"
CFLAGS="${CFLAGS:-${RPM_OPT_FLAGS}}" LDFLAGS="${LDFLAGS:-${RPM_LD_FLAGS}}" TMPDIR="%{_pyproject_builddir}" \\\
%{__python3} -Bs %{_rpmconfigdir}/redhat/pyproject_wheel.py %{_pyproject_wheeldir}
}
||||||
|
|
||||||
|
|
||||||
|
# Expands (via a command substitution) to a colon-separated list of the
# existing setuptools build/lib* directories: platform-specific and pure
# variants in the source tree, plus any found in pip's copied-out build
# trees under %%{_pyproject_builddir}.
%pyproject_build_lib %{expand:\\\
$(
pyproject_build_lib=()
if [ -d build/lib.%{python3_platform}-cpython-%{python3_version_nodots} ]; then
pyproject_build_lib+=( "${PWD}/build/lib.%{python3_platform}-cpython-%{python3_version_nodots}" )
fi
if [ -d build/lib.%{python3_platform}-%{python3_version} ]; then
pyproject_build_lib+=( "${PWD}/build/lib.%{python3_platform}-%{python3_version}" )
fi
if [ -d build/lib ]; then
pyproject_build_lib+=( "${PWD}/build/lib" )
fi
for directory in $(find "%{_pyproject_builddir}" -type d -wholename "%{_pyproject_builddir}/pip-req-build-*/build/lib.%{python3_platform}-%{python3_version}" 2>/dev/null); do
pyproject_build_lib+=( "${directory}" )
done
for directory in $(find "%{_pyproject_builddir}" -type d -wholename "%{_pyproject_builddir}/pip-req-build-*/build/lib" 2>/dev/null); do
pyproject_build_lib+=( "${directory}" )
done
echo $(IFS=:; echo "${pyproject_build_lib[*]}")
)}
||||||
|
|
||||||
|
|
||||||
|
# Install the previously built wheel(s) from %%{_pyproject_wheeldir} into
# %%{buildroot}: fixes shebangs in %%{_bindir}, records every *.dist-info
# directory as %%ghost in %%{_pyproject_ghost_distinfo}, rewrites INSTALLER
# to "rpm", preprocesses RECORD into %%{_pyproject_record} and removes
# RECORD/REQUESTED from the buildroot.
%pyproject_install() %{expand:\\\
specifier=$(ls %{_pyproject_wheeldir}/*.whl | xargs basename --multiple | sed -E 's/([^-]+)-([^-]+)-.+\\\.whl/\\\1==\\\2/')
TMPDIR="%{_pyproject_builddir}" %{__python3} -m pip install --root %{buildroot} --prefix %{_prefix} --no-deps --disable-pip-version-check --progress-bar off --verbose --ignore-installed --no-warn-script-location --no-index --no-cache-dir --find-links %{_pyproject_wheeldir} $specifier
if [ -d %{buildroot}%{_bindir} ]; then
%py3_shebang_fix %{buildroot}%{_bindir}/*
rm -rfv %{buildroot}%{_bindir}/__pycache__
fi
rm -f %{_pyproject_ghost_distinfo}
site_dirs=()
# Process %%{python3_sitelib} if exists
if [ -d %{buildroot}%{python3_sitelib} ]; then
site_dirs+=( "%{python3_sitelib}" )
fi
# Process %%{python3_sitearch} if exists and does not equal to %%{python3_sitelib}
if [ %{buildroot}%{python3_sitearch} != %{buildroot}%{python3_sitelib} ] && [ -d %{buildroot}%{python3_sitearch} ]; then
site_dirs+=( "%{python3_sitearch}" )
fi
# Process all *.dist-info dirs in sitelib/sitearch
for site_dir in ${site_dirs[@]}; do
for distinfo in %{buildroot}$site_dir/*.dist-info; do
echo "%ghost ${distinfo#%{buildroot}}" >> %{_pyproject_ghost_distinfo}
sed -i 's/pip/rpm/' ${distinfo}/INSTALLER
PYTHONPATH=%{_rpmconfigdir}/redhat \\
%{__python3} -B %{_rpmconfigdir}/redhat/pyproject_preprocess_record.py \\
--buildroot %{buildroot} --record ${distinfo}/RECORD --output %{_pyproject_record}
rm -fv ${distinfo}/RECORD
rm -fv ${distinfo}/REQUESTED
done
done
lines=$(wc -l %{_pyproject_ghost_distinfo} | cut -f1 -d" ")
if [ $lines -ne 1 ]; then
echo -e "\\n\\nWARNING: %%%%pyproject_extras_subpkg won't work without explicit -i or -F, found $lines dist-info directories.\\n\\n" >&2
rm %{_pyproject_ghost_distinfo} # any attempt to use this will fail
fi
}
||||||
|
|
||||||
|
|
||||||
|
# Generate extras subpackages by delegating to %%python_extras_subpkg
# (a no-op when that macro is not defined).
# Note: the three times nested questionmarked -i -f -F pattern means: If none of those options was used -- in that case, we inject our own -f
# (pointing at the ghost dist-info list written by %%pyproject_install).
%pyproject_extras_subpkg(n:i:f:F) %{expand:%{?python_extras_subpkg:%{python_extras_subpkg%{?!-i:%{?!-f:%{?!-F: -f %{_pyproject_ghost_distinfo}}}} %**}}}
||||||
|
|
||||||
|
|
||||||
|
# Generate the file list (%%{pyproject_files}) and the importable-module
# list (%%{_pyproject_modules}) from the RECORD data preprocessed by
# %%pyproject_install. Extra arguments (%%{*}) are passed straight to
# pyproject_save_files.py.
%pyproject_save_files() %{expand:\\\
%{__python3} %{_rpmconfigdir}/redhat/pyproject_save_files.py \\
--output-files "%{pyproject_files}" \\
--output-modules "%{_pyproject_modules}" \\
--buildroot "%{buildroot}" \\
--sitelib "%{python3_sitelib}" \\
--sitearch "%{python3_sitearch}" \\
--python-version "%{python3_version}" \\
--pyproject-record "%{_pyproject_record}" \\
--prefix "%{_prefix}" \\
%{*}
}
||||||
|
|
||||||
|
# Smoke-test the installed package by delegating to %%py3_check_import with
# the module list saved by %%pyproject_save_files (hence the guard below).
# -t - Process only top-level modules
# -e - Exclude the module names matching given glob, may be used repeatedly
%pyproject_check_import(e:t) %{expand:\\\
if [ ! -f "%{_pyproject_modules}" ]; then
echo 'ERROR: %%%%pyproject_check_import only works when %%%%pyproject_save_files is used' >&2
exit 1
fi
%py3_check_import -f "%{_pyproject_modules}" %{?**}
}
||||||
|
|
||||||
|
|
||||||
|
# Default tox environment name, e.g. py312; %%toxenv may be redefined
# via %%pyproject_buildrequires -e.
%default_toxenv py%{python3_version_nodots}
%toxenv %{default_toxenv}
||||||
|
|
||||||
|
|
||||||
|
# Generate BuildRequires for the current Python project: echoes RPM
# dependency names on stdout for the %%generate_buildrequires section and
# passes its option letters through to pyproject_buildrequires.py (%%{?**}).
# -R/-r, -R/-w and -N with any of -r/-x/-e/-t/-w are mutually exclusive
# (validated below).
%pyproject_buildrequires(rRxtNwe:) %{expand:\\\
%_set_pytest_addopts
# The _auto_set_build_flags feature does not do this in %%generate_buildrequires section,
# but we want to get an environment consistent with %%build:
%{?_auto_set_build_flags:%set_build_flags}
# The default flags expect the package note file to exist
# see https://bugzilla.redhat.com/show_bug.cgi?id=2097535
%{?_package_note_flags:%_generate_package_note_file}
%{-R:
%{-r:%{error:The -R and -r options are mutually exclusive}}
%{-w:%{error:The -R and -w options are mutually exclusive}}
}
%{-N:
%{-r:%{error:The -N and -r options are mutually exclusive}}
%{-x:%{error:The -N and -x options are mutually exclusive}}
%{-e:%{error:The -N and -e options are mutually exclusive}}
%{-t:%{error:The -N and -t options are mutually exclusive}}
%{-w:%{error:The -N and -w options are mutually exclusive}}
}
%{-e:%{expand:%global toxenv %(%{__python3} -s %{_rpmconfigdir}/redhat/pyproject_construct_toxenv.py %{?**})}}
echo 'pyproject-rpm-macros' # we already have this installed, but this way, it's repoqueryable
echo 'python%{python3_pkgversion}-devel'
echo 'python%{python3_pkgversion}dist(pip) >= 19'
echo 'python%{python3_pkgversion}dist(packaging)'
%{!-N:if [ -f pyproject.toml ]; then
%["%{python3_pkgversion}" == "3"
? "echo '(python%{python3_pkgversion}dist(toml) if python%{python3_pkgversion}-devel < 3.11)'"
: "%[v"%{python3_pkgversion}" < v"3.11"
? "echo 'python%{python3_pkgversion}dist(toml)'"
: "true # will use tomllib, echo nothing"
]"
]
elif [ -f setup.py ]; then
# Note: If the default requirements change, also change them in the script!
echo 'python%{python3_pkgversion}dist(setuptools) >= 40.8'
echo 'python%{python3_pkgversion}dist(wheel)'
else
echo 'ERROR: Neither pyproject.toml nor setup.py found, consider using %%%%pyproject_buildrequires -N <requirements-file> if this is not a Python package.' >&2
exit 1
fi}
# setuptools assumes no pre-existing dist-info
rm -rfv *.dist-info/ >&2
if [ -f %{__python3} ]; then
mkdir -p "%{_pyproject_builddir}"
CFLAGS="${CFLAGS:-${RPM_OPT_FLAGS}}" LDFLAGS="${LDFLAGS:-${RPM_LD_FLAGS}}" TMPDIR="%{_pyproject_builddir}" \\\
RPM_TOXENV="%{toxenv}" HOSTNAME="rpmbuild" %{__python3} -Bs %{_rpmconfigdir}/redhat/pyproject_buildrequires.py %{?!_python_no_extras_requires:--generate-extras} --python3_pkgversion %{python3_pkgversion} --wheeldir %{_pyproject_wheeldir} %{?**}
fi
}
||||||
|
|
||||||
|
|
||||||
|
# Run the test suite via tox with the tox-current-env plugin, pointing
# PATH/PYTHONPATH at the files installed into %%{buildroot}; -e selects a
# tox environment other than %%{toxenv}.
%tox(e:) %{expand:\\\
TOX_TESTENV_PASSENV="${TOX_TESTENV_PASSENV:-*}" \\
PYTHONDONTWRITEBYTECODE=1 \\
PATH="%{buildroot}%{_bindir}:$PATH" \\
PYTHONPATH="${PYTHONPATH:-%{buildroot}%{python3_sitearch}:%{buildroot}%{python3_sitelib}}" \\
%{?__pytest_addopts:PYTEST_ADDOPTS="${PYTEST_ADDOPTS:-} %{__pytest_addopts}"} \\
HOSTNAME="rpmbuild" \\
%{__python3} -m tox --current-env -q --recreate -e "%{-e:%{-e*}}%{!-e:%{toxenv}}" %{?*}
}
@ -0,0 +1,16 @@ |
|||||||
|
diff --git a/src/pip/_vendor/packaging/version.py b/src/pip/_vendor/packaging/version.py |
||||||
|
index de9a09a..154e94d 100644 |
||||||
|
--- a/src/pip/_vendor/packaging/version.py |
||||||
|
+++ b/src/pip/_vendor/packaging/version.py |
||||||
|
@@ -108,11 +108,6 @@ class LegacyVersion(_BaseVersion): |
||||||
|
self._version = str(version) |
||||||
|
self._key = _legacy_cmpkey(self._version) |
||||||
|
|
||||||
|
- warnings.warn( |
||||||
|
- "Creating a LegacyVersion has been deprecated and will be " |
||||||
|
- "removed in the next major release", |
||||||
|
- DeprecationWarning, |
||||||
|
- ) |
||||||
|
|
||||||
|
def __str__(self) -> str: |
||||||
|
return self._version |
@ -0,0 +1,76 @@ |
|||||||
|
From 8dd3793d1bab226cec9c5c49b01718a9634bc403 Mon Sep 17 00:00:00 2001 |
||||||
|
From: Karolina Surma <ksurma@redhat.com> |
||||||
|
Date: Mon, 10 May 2021 16:48:49 +0200 |
||||||
|
Subject: [PATCH] Don't warn the user about pip._internal.main() entrypoint |
||||||
|
|
||||||
|
In Fedora, we use that in ensurepip and users cannot do anything about it, |
||||||
|
this warning is just moot. Also, the warning breaks the CPython test suite. |
||||||
|
|
||||||
|
Co-Authored-By: =?UTF-8?q?Miro=20Hron=C4=8Dok?= <miro@hroncok.cz> |
||||||
|
--- |
||||||
|
src/pip/_internal/__init__.py | 2 +- |
||||||
|
src/pip/_internal/utils/entrypoints.py | 19 ++++++++++--------- |
||||||
|
tests/functional/test_cli.py | 3 ++- |
||||||
|
3 files changed, 13 insertions(+), 11 deletions(-) |
||||||
|
|
||||||
|
diff --git a/src/pip/_internal/__init__.py b/src/pip/_internal/__init__.py |
||||||
|
index 6afb5c6..faf25af 100755 |
||||||
|
--- a/src/pip/_internal/__init__.py |
||||||
|
+++ b/src/pip/_internal/__init__.py |
||||||
|
@@ -16,4 +16,4 @@ def main(args: (Optional[List[str]]) = None) -> int: |
||||||
|
""" |
||||||
|
from pip._internal.utils.entrypoints import _wrapper |
||||||
|
|
||||||
|
- return _wrapper(args) |
||||||
|
+ return _wrapper(args, _nowarn=True) |
||||||
|
diff --git a/src/pip/_internal/utils/entrypoints.py b/src/pip/_internal/utils/entrypoints.py |
||||||
|
index f292c64..2e29a5e 100644 |
||||||
|
--- a/src/pip/_internal/utils/entrypoints.py |
||||||
|
+++ b/src/pip/_internal/utils/entrypoints.py |
||||||
|
@@ -20,7 +20,7 @@ if WINDOWS: |
||||||
|
] |
||||||
|
|
||||||
|
|
||||||
|
-def _wrapper(args: Optional[List[str]] = None) -> int: |
||||||
|
+def _wrapper(args: Optional[List[str]] = None, _nowarn: bool = False) -> int: |
||||||
|
"""Central wrapper for all old entrypoints. |
||||||
|
|
||||||
|
Historically pip has had several entrypoints defined. Because of issues |
||||||
|
@@ -32,14 +32,15 @@ def _wrapper(args: Optional[List[str]] = None) -> int: |
||||||
|
directing them to an appropriate place for help, we now define all of |
||||||
|
our old entrypoints as wrappers for the current one. |
||||||
|
""" |
||||||
|
- sys.stderr.write( |
||||||
|
- "WARNING: pip is being invoked by an old script wrapper. This will " |
||||||
|
- "fail in a future version of pip.\n" |
||||||
|
- "Please see https://github.com/pypa/pip/issues/5599 for advice on " |
||||||
|
- "fixing the underlying issue.\n" |
||||||
|
- "To avoid this problem you can invoke Python with '-m pip' instead of " |
||||||
|
- "running pip directly.\n" |
||||||
|
- ) |
||||||
|
+ if not _nowarn: |
||||||
|
+ sys.stderr.write( |
||||||
|
+ "WARNING: pip is being invoked by an old script wrapper. This will " |
||||||
|
+ "fail in a future version of pip.\n" |
||||||
|
+ "Please see https://github.com/pypa/pip/issues/5599 for advice on " |
||||||
|
+ "fixing the underlying issue.\n" |
||||||
|
+ "To avoid this problem you can invoke Python with '-m pip' instead of " |
||||||
|
+ "running pip directly.\n" |
||||||
|
+ ) |
||||||
|
return main(args) |
||||||
|
|
||||||
|
|
||||||
|
diff --git a/tests/functional/test_cli.py b/tests/functional/test_cli.py |
||||||
|
index 3e85703..f86c392 100644 |
||||||
|
--- a/tests/functional/test_cli.py |
||||||
|
+++ b/tests/functional/test_cli.py |
||||||
|
@@ -43,4 +43,5 @@ def test_entrypoints_work(entrypoint: str, script: PipTestEnvironment) -> None: |
||||||
|
result = script.pip("-V") |
||||||
|
result2 = script.run("fake_pip", "-V", allow_stderr_warning=True) |
||||||
|
assert result.stdout == result2.stdout |
||||||
|
- assert "old script wrapper" in result2.stderr |
||||||
|
+ if entrypoint[0] != "fake_pip = pip._internal:main": |
||||||
|
+ assert "old script wrapper" in result2.stderr |
||||||
|
-- |
||||||
|
2.35.3 |
||||||
|
|
@ -0,0 +1,27 @@ |
|||||||
|
--- /usr/bin/pip3 2019-11-12 17:37:34.793131862 +0100 |
||||||
|
+++ pip3 2019-11-12 17:40:42.014107134 +0100 |
||||||
|
@@ -2,7 +2,23 @@ |
||||||
|
# -*- coding: utf-8 -*- |
||||||
|
import re |
||||||
|
import sys |
||||||
|
-from pip._internal.cli.main import main |
||||||
|
+ |
||||||
|
+try: |
||||||
|
+ from pip._internal.cli.main import main |
||||||
|
+except ImportError: |
||||||
|
+ try: |
||||||
|
+ from pip._internal.main import main |
||||||
|
+ except ImportError: |
||||||
|
+ try: |
||||||
|
+ # If the user has downgraded pip, the above import will fail. |
||||||
|
+ # Let's try older methods of invoking it: |
||||||
|
+ |
||||||
|
+ # pip 19 uses this |
||||||
|
+ from pip._internal import main |
||||||
|
+ except ImportError: |
||||||
|
+ # older pip versions use this |
||||||
|
+ from pip import main |
||||||
|
+ |
||||||
|
if __name__ == '__main__': |
||||||
|
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) |
||||||
|
sys.exit(main()) |
@ -0,0 +1,519 @@ |
|||||||
|
import glob |
||||||
|
import io |
||||||
|
import os |
||||||
|
import sys |
||||||
|
import importlib.metadata |
||||||
|
import argparse |
||||||
|
import traceback |
||||||
|
import contextlib |
||||||
|
from io import StringIO |
||||||
|
import json |
||||||
|
import subprocess |
||||||
|
import re |
||||||
|
import tempfile |
||||||
|
import email.parser |
||||||
|
import pathlib |
||||||
|
import zipfile |
||||||
|
|
||||||
|
from pyproject_requirements_txt import convert_requirements_txt |
||||||
|
|
||||||
|
|
||||||
|
# Some valid Python version specifiers are not supported.
# Allow only the forms we know we can handle.
# (Each specifier version is fullmatch-ed against this pattern in
# Requirements.add; anything else raises an explicit ValueError.)
VERSION_RE = re.compile(r'[a-zA-Z0-9.-]+(\.\*)?')
||||||
|
|
||||||
|
|
||||||
|
class EndPass(Exception):
    """Raised to terminate the current requirement-generation pass."""
||||||
|
|
||||||
|
|
||||||
|
# nb: we don't use functools.partial to be able to use pytest's capsys |
||||||
|
# see https://github.com/pytest-dev/pytest/issues/8900 |
||||||
|
def print_err(*args, **kwargs):
    """Like print(), but writes to stderr unless a file= is given."""
    if 'file' not in kwargs:
        kwargs['file'] = sys.stderr
    print(*args, **kwargs)
||||||
|
|
||||||
|
|
||||||
|
try: |
||||||
|
from packaging.requirements import Requirement, InvalidRequirement |
||||||
|
from packaging.utils import canonicalize_name |
||||||
|
except ImportError as e: |
||||||
|
print_err('Import error:', e) |
||||||
|
# already echoed by the %pyproject_buildrequires macro |
||||||
|
sys.exit(0) |
||||||
|
|
||||||
|
# uses packaging, needs to be imported after packaging is verified to be present |
||||||
|
from pyproject_convert import convert |
||||||
|
|
||||||
|
|
||||||
|
@contextlib.contextmanager
def hook_call():
    """Capture stdout produced inside the block and re-emit it on stderr.

    Each captured line is echoed through print_err with a 'HOOK STDOUT:'
    prefix, keeping the real stdout machine-readable.
    """
    buffer = StringIO()
    with contextlib.redirect_stdout(buffer):
        yield
    for captured_line in buffer.getvalue().splitlines():
        print_err('HOOK STDOUT:', captured_line)
||||||
|
|
||||||
|
|
||||||
|
def guess_reason_for_invalid_requirement(requirement_str):
    """Return a human-readable hint why *requirement_str* failed to parse.

    Heuristics: a ':' suggests a URL, a '/' suggests a local path.
    Returns None when no guess can be made.
    """
    if ':' in requirement_str:
        hint = (
            'It might be an URL. '
            '%pyproject_buildrequires cannot handle all URL-based requirements. '
            'Add PackageName@ (see PEP 508) to the URL to at least require any version of PackageName.'
        )
        if '@' in requirement_str:
            hint += ' (but note that URLs might not work well with other features)'
        return hint
    if '/' in requirement_str:
        return (
            'It might be a local path. '
            '%pyproject_buildrequires cannot handle local paths as requirements. '
            'Use an URL with PackageName@ (see PEP 508) to at least require any version of PackageName.'
        )
    # No more ideas; let the caller report the bare parse error.
    return None
||||||
|
|
||||||
|
|
||||||
|
class Requirements:
    """Requirement printer.

    Converts PEP 508 requirement strings into RPM dependency strings,
    printed on stdout (stderr carries diagnostics only). Tracks whether any
    requirement was not already satisfied so a generation pass can be ended
    early via check().
    """
    def __init__(self, get_installed_version, extras=None,
                 generate_extras=False, python3_pkgversion='3'):
        # Callable mapping a distribution name to its installed version;
        # expected to raise importlib.metadata.PackageNotFoundError when the
        # distribution is absent (see add()).
        self.get_installed_version = get_installed_version
        self.extras = set()

        # Each element of `extras` may itself be a comma-separated list.
        if extras:
            for extra in extras:
                self.add_extras(*extra.split(','))

        # Flipped to True by add() when a requirement is missing/unsatisfied;
        # read by check().
        self.missing_requirements = False

        self.generate_extras = generate_extras
        self.python3_pkgversion = python3_pkgversion

    def add_extras(self, *extras):
        """Register extra names (surrounding whitespace is stripped)."""
        self.extras |= set(e.strip() for e in extras)

    @property
    def marker_envs(self):
        # One marker environment per registered extra so markers such as
        # 'extra == "tests"' can evaluate to True; otherwise a single
        # environment with an empty extra.
        if self.extras:
            return [{'extra': e} for e in sorted(self.extras)]
        return [{'extra': ''}]

    def evaluate_all_environamnets(self, requirement):
        # (sic: historical typo in the method name, kept so existing callers
        # keep working)
        # True if the requirement's marker matches at least one marker env.
        for marker_env in self.marker_envs:
            if requirement.marker.evaluate(environment=marker_env):
                return True
        return False

    def add(self, requirement_str, *, source=None):
        """Output a Python-style requirement string as RPM dep"""
        print_err(f'Handling {requirement_str} from {source}')

        try:
            requirement = Requirement(requirement_str)
        except InvalidRequirement:
            hint = guess_reason_for_invalid_requirement(requirement_str)
            message = f'Requirement {requirement_str!r} from {source} is invalid.'
            if hint:
                message += f' Hint: {hint}'
            raise ValueError(message)

        if requirement.url:
            # URL requirements are reduced to their bare package name.
            print_err(
                f'WARNING: Simplifying {requirement_str!r} to {requirement.name!r}.'
            )

        name = canonicalize_name(requirement.name)
        # Skip requirements whose marker matches none of our environments.
        if (requirement.marker is not None and
                not self.evaluate_all_environamnets(requirement)):
            print_err(f'Ignoring alien requirement:', requirement_str)
            return

        # We need to always accept pre-releases as satisfying the requirement
        # Otherwise e.g. installed cffi version 1.15.0rc2 won't even satisfy the requirement for "cffi"
        # https://bugzilla.redhat.com/show_bug.cgi?id=2014639#c3
        requirement.specifier.prereleases = True

        try:
            # TODO: check if requirements with extras are satisfied
            installed = self.get_installed_version(requirement.name)
        except importlib.metadata.PackageNotFoundError:
            print_err(f'Requirement not satisfied: {requirement_str}')
            installed = None
        if installed and installed in requirement.specifier:
            print_err(f'Requirement satisfied: {requirement_str}')
            print_err(f' (installed: {requirement.name} {installed})')
            if requirement.extras:
                print_err(f' (extras are currently not checked)')
        else:
            self.missing_requirements = True

        # Optionally emit name[extra] dependencies next to the plain name.
        if self.generate_extras:
            extra_names = [f'{name}[{extra.lower()}]' for extra in sorted(requirement.extras)]
        else:
            extra_names = []

        for name in [name] + extra_names:
            # One RPM dependency term per version specifier; multiple terms
            # are combined with a rich "with" dependency.
            together = []
            for specifier in sorted(
                requirement.specifier,
                key=lambda s: (s.operator, s.version),
            ):
                # Reject version forms we cannot translate (see VERSION_RE).
                if not VERSION_RE.fullmatch(str(specifier.version)):
                    raise ValueError(
                        f'Unknown character in version: {specifier.version}. '
                        + '(This might be a bug in pyproject-rpm-macros.)',
                    )
                together.append(convert(python3dist(name, python3_pkgversion=self.python3_pkgversion),
                                        specifier.operator, specifier.version))
            if len(together) == 0:
                print(python3dist(name,
                                  python3_pkgversion=self.python3_pkgversion))
            elif len(together) == 1:
                print(together[0])
            else:
                print(f"({' with '.join(together)})")

    def check(self, *, source=None):
        """End current pass if any unsatisfied dependencies were output"""
        if self.missing_requirements:
            print_err(f'Exiting dependency generation pass: {source}')
            raise EndPass(source)

    def extend(self, requirement_strs, **kwargs):
        """add() several requirements"""
        for req_str in requirement_strs:
            self.add(req_str, **kwargs)
||||||
|
|
||||||
|
|
||||||
|
def toml_load(opened_binary_file):
    """Parse TOML from a binary file object with whichever parser exists.

    Prefers the stdlib tomllib (Python >= 3.11); otherwise falls back to the
    third-party 'toml' package (kept for RHEL 9 compatibility), which needs
    a text stream. Exits with status 0 when neither is importable, since the
    %pyproject_buildrequires macro has already echoed the dependency.
    """
    try:
        # tomllib is in the standard library since 3.11.0b1
        import tomllib as parser
    except ImportError:
        try:
            # note: we could use tomli here,
            # but for backwards compatibility with RHEL 9, we use toml instead
            import toml as parser
        except ImportError as e:
            print_err('Import error:', e)
            # already echoed by the %pyproject_buildrequires macro
            sys.exit(0)
        # The legacy 'toml' module wants text, not bytes.
        source = io.TextIOWrapper(opened_binary_file, encoding='utf-8')
    else:
        source = opened_binary_file
    return parser.load(source)
||||||
|
|
||||||
|
|
||||||
|
def get_backend(requirements): |
||||||
|
try: |
||||||
|
f = open('pyproject.toml', 'rb') |
||||||
|
except FileNotFoundError: |
||||||
|
pyproject_data = {} |
||||||
|
else: |
||||||
|
with f: |
||||||
|
pyproject_data = toml_load(f) |
||||||
|
|
||||||
|
buildsystem_data = pyproject_data.get('build-system', {}) |
||||||
|
requirements.extend( |
||||||
|
buildsystem_data.get('requires', ()), |
||||||
|
source='build-system.requires', |
||||||
|
) |
||||||
|
|
||||||
|
backend_name = buildsystem_data.get('build-backend') |
||||||
|
if not backend_name: |
||||||
|
# https://www.python.org/dev/peps/pep-0517/: |
||||||
|
# If the pyproject.toml file is absent, or the build-backend key is |
||||||
|
# missing, the source tree is not using this specification, and tools |
||||||
|
# should revert to the legacy behaviour of running setup.py |
||||||
|
# (either directly, or by implicitly invoking the [following] backend). |
||||||
|
# If setup.py is also not present program will mimick pip's behavior |
||||||
|
# and end with an error. |
||||||
|
if not os.path.exists('setup.py'): |
||||||
|
raise FileNotFoundError('File "setup.py" not found for legacy project.') |
||||||
|
backend_name = 'setuptools.build_meta:__legacy__' |
||||||
|
|
||||||
|
# Note: For projects without pyproject.toml, this was already echoed |
||||||
|
# by the %pyproject_buildrequires macro, but this also handles cases |
||||||
|
# with pyproject.toml without a specified build backend. |
||||||
|
# If the default requirements change, also change them in the macro! |
||||||
|
requirements.add('setuptools >= 40.8', source='default build backend') |
||||||
|
requirements.add('wheel', source='default build backend') |
||||||
|
|
||||||
|
requirements.check(source='build backend') |
||||||
|
|
||||||
|
backend_path = buildsystem_data.get('backend-path') |
||||||
|
if backend_path: |
||||||
|
# PEP 517 example shows the path as a list, but some projects don't follow that |
||||||
|
if isinstance(backend_path, str): |
||||||
|
backend_path = [backend_path] |
||||||
|
sys.path = backend_path + sys.path |
||||||
|
|
||||||
|
module_name, _, object_name = backend_name.partition(":") |
||||||
|
backend_module = importlib.import_module(module_name) |
||||||
|
|
||||||
|
if object_name: |
||||||
|
return getattr(backend_module, object_name) |
||||||
|
|
||||||
|
return backend_module |
||||||
|
|
||||||
|
|
||||||
|
def generate_build_requirements(backend, requirements): |
||||||
|
get_requires = getattr(backend, 'get_requires_for_build_wheel', None) |
||||||
|
if get_requires: |
||||||
|
with hook_call(): |
||||||
|
new_reqs = get_requires() |
||||||
|
requirements.extend(new_reqs, source='get_requires_for_build_wheel') |
||||||
|
requirements.check(source='get_requires_for_build_wheel') |
||||||
|
|
||||||
|
|
||||||
|
def requires_from_metadata_file(metadata_file): |
||||||
|
message = email.parser.Parser().parse(metadata_file, headersonly=True) |
||||||
|
return {k: message.get_all(k, ()) for k in ('Requires', 'Requires-Dist')} |
||||||
|
|
||||||
|
|
||||||
|
def generate_run_requirements_hook(backend, requirements): |
||||||
|
hook_name = 'prepare_metadata_for_build_wheel' |
||||||
|
prepare_metadata = getattr(backend, hook_name, None) |
||||||
|
if not prepare_metadata: |
||||||
|
raise ValueError( |
||||||
|
'The build backend cannot provide build metadata ' |
||||||
|
'(incl. runtime requirements) before build. ' |
||||||
|
'Use the provisional -w flag to build the wheel and parse the metadata from it, ' |
||||||
|
'or use the -R flag not to generate runtime dependencies.' |
||||||
|
) |
||||||
|
with hook_call(): |
||||||
|
dir_basename = prepare_metadata('.') |
||||||
|
with open(dir_basename + '/METADATA') as metadata_file: |
||||||
|
for key, requires in requires_from_metadata_file(metadata_file).items(): |
||||||
|
requirements.extend(requires, source=f'hook generated metadata: {key}') |
||||||
|
|
||||||
|
|
||||||
|
def find_built_wheel(wheeldir): |
||||||
|
wheels = glob.glob(os.path.join(wheeldir, '*.whl')) |
||||||
|
if not wheels: |
||||||
|
return None |
||||||
|
if len(wheels) > 1: |
||||||
|
raise RuntimeError('Found multiple wheels in %{_pyproject_wheeldir}, ' |
||||||
|
'this is not supported with %pyproject_buildrequires -w.') |
||||||
|
return wheels[0] |
||||||
|
|
||||||
|
|
||||||
|
def generate_run_requirements_wheel(backend, requirements, wheeldir): |
||||||
|
# Reuse the wheel from the previous round of %pyproject_buildrequires (if it exists) |
||||||
|
wheel = find_built_wheel(wheeldir) |
||||||
|
if not wheel: |
||||||
|
import pyproject_wheel |
||||||
|
returncode = pyproject_wheel.build_wheel(wheeldir=wheeldir, stdout=sys.stderr) |
||||||
|
if returncode != 0: |
||||||
|
raise RuntimeError('Failed to build the wheel for %pyproject_buildrequires -w.') |
||||||
|
wheel = find_built_wheel(wheeldir) |
||||||
|
if not wheel: |
||||||
|
raise RuntimeError('Cannot locate the built wheel for %pyproject_buildrequires -w.') |
||||||
|
|
||||||
|
print_err(f'Reading metadata from {wheel}') |
||||||
|
with zipfile.ZipFile(wheel) as wheelfile: |
||||||
|
for name in wheelfile.namelist(): |
||||||
|
if name.count('/') == 1 and name.endswith('.dist-info/METADATA'): |
||||||
|
with io.TextIOWrapper(wheelfile.open(name), encoding='utf-8') as metadata_file: |
||||||
|
for key, requires in requires_from_metadata_file(metadata_file).items(): |
||||||
|
requirements.extend(requires, source=f'built wheel metadata: {key}') |
||||||
|
break |
||||||
|
else: |
||||||
|
raise RuntimeError('Could not find *.dist-info/METADATA in built wheel.') |
||||||
|
|
||||||
|
|
||||||
|
def generate_run_requirements(backend, requirements, *, build_wheel, wheeldir): |
||||||
|
if build_wheel: |
||||||
|
generate_run_requirements_wheel(backend, requirements, wheeldir) |
||||||
|
else: |
||||||
|
generate_run_requirements_hook(backend, requirements) |
||||||
|
|
||||||
|
|
||||||
|
def generate_tox_requirements(toxenv, requirements): |
||||||
|
toxenv = ','.join(toxenv) |
||||||
|
requirements.add('tox-current-env >= 0.0.6', source='tox itself') |
||||||
|
requirements.check(source='tox itself') |
||||||
|
with tempfile.NamedTemporaryFile('r') as deps, \ |
||||||
|
tempfile.NamedTemporaryFile('r') as extras, \ |
||||||
|
tempfile.NamedTemporaryFile('r') as provision: |
||||||
|
r = subprocess.run( |
||||||
|
[sys.executable, '-m', 'tox', |
||||||
|
'--print-deps-to', deps.name, |
||||||
|
'--print-extras-to', extras.name, |
||||||
|
'--no-provision', provision.name, |
||||||
|
'-q', '-r', '-e', toxenv], |
||||||
|
check=False, |
||||||
|
encoding='utf-8', |
||||||
|
stdout=subprocess.PIPE, |
||||||
|
stderr=subprocess.STDOUT, |
||||||
|
) |
||||||
|
if r.stdout: |
||||||
|
print_err(r.stdout, end='') |
||||||
|
|
||||||
|
provision_content = provision.read() |
||||||
|
if provision_content and r.returncode != 0: |
||||||
|
provision_requires = json.loads(provision_content) |
||||||
|
if 'minversion' in provision_requires: |
||||||
|
requirements.add(f'tox >= {provision_requires["minversion"]}', |
||||||
|
source='tox provision (minversion)') |
||||||
|
if 'requires' in provision_requires: |
||||||
|
requirements.extend(provision_requires["requires"], |
||||||
|
source='tox provision (requires)') |
||||||
|
requirements.check(source='tox provision') # this terminates the script |
||||||
|
raise RuntimeError( |
||||||
|
'Dependencies requested by tox provisioning appear installed, ' |
||||||
|
'but tox disagreed.') |
||||||
|
else: |
||||||
|
r.check_returncode() |
||||||
|
|
||||||
|
deplines = deps.read().splitlines() |
||||||
|
packages = convert_requirements_txt(deplines) |
||||||
|
requirements.add_extras(*extras.read().splitlines()) |
||||||
|
requirements.extend(packages, |
||||||
|
source=f'tox --print-deps-only: {toxenv}') |
||||||
|
|
||||||
|
|
||||||
|
def python3dist(name, op=None, version=None, python3_pkgversion="3"): |
||||||
|
prefix = f"python{python3_pkgversion}dist" |
||||||
|
|
||||||
|
if op is None: |
||||||
|
if version is not None: |
||||||
|
raise AssertionError('op and version go together') |
||||||
|
return f'{prefix}({name})' |
||||||
|
else: |
||||||
|
return f'{prefix}({name}) {op} {version}' |
||||||
|
|
||||||
|
|
||||||
|
def generate_requires( |
||||||
|
*, include_runtime=False, build_wheel=False, wheeldir=None, toxenv=None, extras=None, |
||||||
|
get_installed_version=importlib.metadata.version, # for dep injection |
||||||
|
generate_extras=False, python3_pkgversion="3", requirement_files=None, use_build_system=True |
||||||
|
): |
||||||
|
"""Generate the BuildRequires for the project in the current directory |
||||||
|
|
||||||
|
This is the main Python entry point. |
||||||
|
""" |
||||||
|
requirements = Requirements( |
||||||
|
get_installed_version, extras=extras or [], |
||||||
|
generate_extras=generate_extras, |
||||||
|
python3_pkgversion=python3_pkgversion |
||||||
|
) |
||||||
|
|
||||||
|
try: |
||||||
|
if (include_runtime or toxenv) and not use_build_system: |
||||||
|
raise ValueError('-N option cannot be used in combination with -r, -e, -t, -x options') |
||||||
|
if requirement_files: |
||||||
|
for req_file in requirement_files: |
||||||
|
requirements.extend( |
||||||
|
convert_requirements_txt(req_file, pathlib.Path(req_file.name)), |
||||||
|
source=f'requirements file {req_file.name}' |
||||||
|
) |
||||||
|
requirements.check(source='all requirements files') |
||||||
|
if use_build_system: |
||||||
|
backend = get_backend(requirements) |
||||||
|
generate_build_requirements(backend, requirements) |
||||||
|
if toxenv: |
||||||
|
include_runtime = True |
||||||
|
generate_tox_requirements(toxenv, requirements) |
||||||
|
if include_runtime: |
||||||
|
generate_run_requirements(backend, requirements, build_wheel=build_wheel, wheeldir=wheeldir) |
||||||
|
except EndPass: |
||||||
|
return |
||||||
|
|
||||||
|
|
||||||
|
def main(argv): |
||||||
|
parser = argparse.ArgumentParser( |
||||||
|
description='Generate BuildRequires for a Python project.' |
||||||
|
) |
||||||
|
parser.add_argument( |
||||||
|
'-r', '--runtime', action='store_true', default=True, |
||||||
|
help='Generate run-time requirements (default, disable with -R)', |
||||||
|
) |
||||||
|
parser.add_argument( |
||||||
|
'-w', '--wheel', action='store_true', default=False, |
||||||
|
help=('Generate run-time requirements by building the wheel ' |
||||||
|
'(useful for build backends without the prepare_metadata_for_build_wheel hook)'), |
||||||
|
) |
||||||
|
parser.add_argument( |
||||||
|
'--wheeldir', metavar='PATH', default=None, |
||||||
|
help='The directory with wheel, used when -w.', |
||||||
|
) |
||||||
|
parser.add_argument( |
||||||
|
'-R', '--no-runtime', action='store_false', dest='runtime', |
||||||
|
help="Don't generate run-time requirements (implied by -N)", |
||||||
|
) |
||||||
|
parser.add_argument( |
||||||
|
'-e', '--toxenv', metavar='TOXENVS', action='append', |
||||||
|
help=('specify tox environments (comma separated and/or repeated)' |
||||||
|
'(implies --tox)'), |
||||||
|
) |
||||||
|
parser.add_argument( |
||||||
|
'-t', '--tox', action='store_true', |
||||||
|
help=('generate test tequirements from tox environment ' |
||||||
|
'(implies --runtime)'), |
||||||
|
) |
||||||
|
parser.add_argument( |
||||||
|
'-x', '--extras', metavar='EXTRAS', action='append', |
||||||
|
help='comma separated list of "extras" for runtime requirements ' |
||||||
|
'(e.g. -x testing,feature-x) (implies --runtime, can be repeated)', |
||||||
|
) |
||||||
|
parser.add_argument( |
||||||
|
'--generate-extras', action='store_true', |
||||||
|
help='Generate build requirements on Python Extras', |
||||||
|
) |
||||||
|
parser.add_argument( |
||||||
|
'-p', '--python3_pkgversion', metavar='PYTHON3_PKGVERSION', |
||||||
|
default="3", help=('Python version for pythonXdist()' |
||||||
|
'or pythonX.Ydist() requirements'), |
||||||
|
) |
||||||
|
parser.add_argument( |
||||||
|
'-N', '--no-use-build-system', dest='use_build_system', |
||||||
|
action='store_false', help='Use -N to indicate that project does not use any build system', |
||||||
|
) |
||||||
|
parser.add_argument( |
||||||
|
'requirement_files', nargs='*', type=argparse.FileType('r'), |
||||||
|
help=('Add buildrequires from file'), |
||||||
|
) |
||||||
|
|
||||||
|
args = parser.parse_args(argv) |
||||||
|
|
||||||
|
if not args.use_build_system: |
||||||
|
args.runtime = False |
||||||
|
|
||||||
|
if args.wheel: |
||||||
|
if not args.wheeldir: |
||||||
|
raise ValueError('--wheeldir must be set when -w.') |
||||||
|
|
||||||
|
if args.toxenv: |
||||||
|
args.tox = True |
||||||
|
|
||||||
|
if args.tox: |
||||||
|
args.runtime = True |
||||||
|
if not args.toxenv: |
||||||
|
_default = f'py{sys.version_info.major}{sys.version_info.minor}' |
||||||
|
args.toxenv = [os.getenv('RPM_TOXENV', _default)] |
||||||
|
|
||||||
|
if args.extras: |
||||||
|
args.runtime = True |
||||||
|
|
||||||
|
try: |
||||||
|
generate_requires( |
||||||
|
include_runtime=args.runtime, |
||||||
|
build_wheel=args.wheel, |
||||||
|
wheeldir=args.wheeldir, |
||||||
|
toxenv=args.toxenv, |
||||||
|
extras=args.extras, |
||||||
|
generate_extras=args.generate_extras, |
||||||
|
python3_pkgversion=args.python3_pkgversion, |
||||||
|
requirement_files=args.requirement_files, |
||||||
|
use_build_system=args.use_build_system, |
||||||
|
) |
||||||
|
except Exception: |
||||||
|
# Log the traceback explicitly (it's useful debug info) |
||||||
|
traceback.print_exc() |
||||||
|
exit(1) |
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__': |
||||||
|
main(sys.argv[1:]) |
@ -0,0 +1,773 @@ |
|||||||
|
No pyproject.toml, nothing installed: |
||||||
|
installed: |
||||||
|
# empty |
||||||
|
except: FileNotFoundError |
||||||
|
|
||||||
|
Nothing installed yet: |
||||||
|
installed: |
||||||
|
# empty |
||||||
|
pyproject.toml: | |
||||||
|
# empty |
||||||
|
expected: | |
||||||
|
python3dist(setuptools) >= 40.8 |
||||||
|
python3dist(wheel) |
||||||
|
except: FileNotFoundError |
||||||
|
|
||||||
|
Insufficient version of setuptools: |
||||||
|
installed: |
||||||
|
setuptools: 5 |
||||||
|
wheel: 1 |
||||||
|
toml: 1 |
||||||
|
pyproject.toml: | |
||||||
|
# empty |
||||||
|
setup.py: | |
||||||
|
expected: | |
||||||
|
python3dist(setuptools) >= 40.8 |
||||||
|
python3dist(wheel) |
||||||
|
result: 0 |
||||||
|
|
||||||
|
No pyproject.toml, empty setup.py: |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
include_runtime: false |
||||||
|
setup.py: | |
||||||
|
expected: | |
||||||
|
python3dist(setuptools) >= 40.8 |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(wheel) |
||||||
|
result: 0 |
||||||
|
|
||||||
|
Default build system, empty setup.py: |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
toml: 1 |
||||||
|
include_runtime: false |
||||||
|
pyproject.toml: | |
||||||
|
# empty |
||||||
|
setup.py: | |
||||||
|
expected: | |
||||||
|
python3dist(setuptools) >= 40.8 |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(wheel) |
||||||
|
result: 0 |
||||||
|
|
||||||
|
pyproject.toml with build-backend and setup.py: |
||||||
|
generate_extras: true |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
toml: 1 |
||||||
|
setup.py: | |
||||||
|
# empty |
||||||
|
pyproject.toml: | |
||||||
|
[build-system] |
||||||
|
requires = [ |
||||||
|
"foo", |
||||||
|
] |
||||||
|
build-backend = "foo.build" |
||||||
|
expected: | |
||||||
|
python3dist(foo) |
||||||
|
result: 0 |
||||||
|
|
||||||
|
Erroring setup.py: |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
setup.py: | |
||||||
|
exit(77) |
||||||
|
result: 77 |
||||||
|
|
||||||
|
Bad character in version: |
||||||
|
installed: |
||||||
|
toml: 1 |
||||||
|
pyproject.toml: | |
||||||
|
[build-system] |
||||||
|
requires = ["pkg == 0.$.^.*"] |
||||||
|
except: ValueError |
||||||
|
|
||||||
|
Single value version with unsupported compatible operator: |
||||||
|
installed: |
||||||
|
toml: 1 |
||||||
|
pyproject.toml: | |
||||||
|
[build-system] |
||||||
|
requires = ["pkg ~= 42", "foo"] |
||||||
|
build-backend = "foo.build" |
||||||
|
except: ValueError |
||||||
|
|
||||||
|
Asterisk in version with unsupported compatible operator: |
||||||
|
installed: |
||||||
|
toml: 1 |
||||||
|
pyproject.toml: | |
||||||
|
[build-system] |
||||||
|
requires = ["pkg ~= 0.1.*", "foo"] |
||||||
|
build-backend = "foo.build" |
||||||
|
except: ValueError |
||||||
|
|
||||||
|
Local path as requirement: |
||||||
|
installed: |
||||||
|
toml: 1 |
||||||
|
pyproject.toml: | |
||||||
|
[build-system] |
||||||
|
requires = ["./pkg-1.2.3.tar.gz", "foo"] |
||||||
|
build-backend = "foo.build" |
||||||
|
except: ValueError |
||||||
|
|
||||||
|
Pip's egg=pkgName requirement not in requirements file: |
||||||
|
installed: |
||||||
|
toml: 1 |
||||||
|
pyproject.toml: | |
||||||
|
[build-system] |
||||||
|
requires = ["git+https://github.com/monty/spam.git@master#egg=spam", "foo"] |
||||||
|
build-backend = "foo.build" |
||||||
|
except: ValueError |
||||||
|
|
||||||
|
URL without egg fragment as requirement: |
||||||
|
installed: |
||||||
|
toml: 1 |
||||||
|
pyproject.toml: | |
||||||
|
[build-system] |
||||||
|
requires = ["git+https://github.com/pkg-dev/pkg.git@96dbe5e3", "foo"] |
||||||
|
build-backend = "foo.build" |
||||||
|
except: ValueError |
||||||
|
|
||||||
|
Build system dependencies in pyproject.toml with extras: |
||||||
|
generate_extras: true |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
toml: 1 |
||||||
|
pyproject.toml: | |
||||||
|
[build-system] |
||||||
|
requires = [ |
||||||
|
"foo", |
||||||
|
"bar[bAz] > 5", |
||||||
|
"ne!=1", |
||||||
|
"ge>=1.2.0", |
||||||
|
"le <= 1.2.3", |
||||||
|
"lt < 1.2.3.4 ", |
||||||
|
" gt > 1.2.3.4.5", |
||||||
|
"compatible ~= 0.4.0", |
||||||
|
"equal == 0.5.0", |
||||||
|
"arbitrary_equal === 0.6.0", |
||||||
|
"asterisk_equal == 0.6.*", |
||||||
|
"appdirs@https://github.com/ActiveState/appdirs/archive/8eacfa312d77aba28d483fbfb6f6fc54099622be.zip", |
||||||
|
"multi[Extras1,Extras2] == 6.0", |
||||||
|
"combo >2, <5, != 3.0.0", |
||||||
|
"py2 ; python_version < '2.7'", |
||||||
|
"py3 ; python_version > '3.0'", |
||||||
|
] |
||||||
|
build-backend = "foo.build" |
||||||
|
expected: | |
||||||
|
python3dist(foo) |
||||||
|
python3dist(bar) > 5.0 |
||||||
|
python3dist(bar[baz]) > 5.0 |
||||||
|
(python3dist(ne) < 1 or python3dist(ne) > 1) |
||||||
|
python3dist(ge) >= 1.2 |
||||||
|
python3dist(le) <= 1.2.3 |
||||||
|
python3dist(lt) < 1.2.3.4~~ |
||||||
|
python3dist(gt) > 1.2.3.4.5.0 |
||||||
|
(python3dist(compatible) >= 0.4 with python3dist(compatible) < 0.5) |
||||||
|
python3dist(equal) = 0.5 |
||||||
|
python3dist(arbitrary-equal) = 0.6 |
||||||
|
(python3dist(asterisk-equal) >= 0.6 with python3dist(asterisk-equal) < 0.7) |
||||||
|
python3dist(appdirs) |
||||||
|
python3dist(multi) = 6 |
||||||
|
python3dist(multi[extras1]) = 6 |
||||||
|
python3dist(multi[extras2]) = 6 |
||||||
|
((python3dist(combo) < 3 or python3dist(combo) > 3) with python3dist(combo) < 5~~ with python3dist(combo) > 2.0) |
||||||
|
python3dist(py3) |
||||||
|
stderr_contains: "WARNING: Simplifying 'appdirs@https://github.com/ActiveState/appdirs/archive/8eacfa312d77aba28d483fbfb6f6fc54099622be.zip' to 'appdirs'." |
||||||
|
result: 0 |
||||||
|
|
||||||
|
Build system dependencies in pyproject.toml without extras: |
||||||
|
generate_extras: false |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
toml: 1 |
||||||
|
pyproject.toml: | |
||||||
|
[build-system] |
||||||
|
requires = [ |
||||||
|
"bar[Baz] > 5", |
||||||
|
"multi[extras1,extras2] == 6.0", |
||||||
|
] |
||||||
|
build-backend = "foo.build" |
||||||
|
expected: | |
||||||
|
python3dist(bar) > 5.0 |
||||||
|
python3dist(multi) = 6 |
||||||
|
result: 0 |
||||||
|
|
||||||
|
Default build system, build dependencies in setup.py: |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
include_runtime: false |
||||||
|
setup.py: | |
||||||
|
from setuptools import setup |
||||||
|
setup( |
||||||
|
name='test', |
||||||
|
version='0.1', |
||||||
|
setup_requires=['foo', 'bar!=2', 'baz~=1.1.1'], |
||||||
|
install_requires=['inst'], |
||||||
|
) |
||||||
|
expected: | |
||||||
|
python3dist(setuptools) >= 40.8 |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(foo) |
||||||
|
(python3dist(bar) < 2 or python3dist(bar) > 2) |
||||||
|
(python3dist(baz) >= 1.1.1 with python3dist(baz) < 1.2) |
||||||
|
result: 0 |
||||||
|
|
||||||
|
Default build system, run dependencies in setup.py: |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
pyyaml: 1 |
||||||
|
setup.py: | |
||||||
|
from setuptools import setup |
||||||
|
setup( |
||||||
|
name='test', |
||||||
|
version='0.1', |
||||||
|
setup_requires=['pyyaml'], # nb. setuptools will try to install this |
||||||
|
install_requires=['inst > 1', 'inst2 < 3'], |
||||||
|
) |
||||||
|
expected: | |
||||||
|
python3dist(setuptools) >= 40.8 |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(pyyaml) |
||||||
|
python3dist(inst) > 1.0 |
||||||
|
python3dist(inst2) < 3~~ |
||||||
|
result: 0 |
||||||
|
|
||||||
|
Run dependencies with extras (not selected): |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
pyyaml: 1 |
||||||
|
setup.py: &pytest_setup_py | |
||||||
|
# slightly abriged copy of pytest's setup.py |
||||||
|
from setuptools import setup |
||||||
|
|
||||||
|
INSTALL_REQUIRES = [ |
||||||
|
"py>=1.5.0", |
||||||
|
"six>=1.10.0", |
||||||
|
"setuptools", |
||||||
|
"attrs>=17.4.0", |
||||||
|
'more-itertools>=4.0.0,<6.0.0;python_version<="2.7"', |
||||||
|
'more-itertools>=4.0.0;python_version>"2.7"', |
||||||
|
"atomicwrites>=1.0", |
||||||
|
'funcsigs>=1.0;python_version<"3.0"', |
||||||
|
'pathlib2>=2.2.0;python_version<"3.6"', |
||||||
|
'colorama;sys_platform=="win32"', |
||||||
|
"pluggy>=0.11", |
||||||
|
] |
||||||
|
|
||||||
|
def main(): |
||||||
|
setup( |
||||||
|
name = "pytest", |
||||||
|
version = "6.6.6", |
||||||
|
setup_requires=["setuptools>=40.0"], |
||||||
|
# fmt: off |
||||||
|
extras_require={ |
||||||
|
"testing": [ |
||||||
|
"argcomplete", |
||||||
|
"hypothesis>=3.56", |
||||||
|
"nose", |
||||||
|
"requests", |
||||||
|
"mock;python_version=='2.7'", |
||||||
|
], |
||||||
|
}, |
||||||
|
# fmt: on |
||||||
|
install_requires=INSTALL_REQUIRES, |
||||||
|
) |
||||||
|
|
||||||
|
if __name__ == "__main__": |
||||||
|
main() |
||||||
|
expected: | |
||||||
|
python3dist(setuptools) >= 40.8 |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(setuptools) >= 40 |
||||||
|
python3dist(py) >= 1.5 |
||||||
|
python3dist(six) >= 1.10 |
||||||
|
python3dist(setuptools) |
||||||
|
python3dist(attrs) >= 17.4 |
||||||
|
python3dist(atomicwrites) >= 1 |
||||||
|
python3dist(pluggy) >= 0.11 |
||||||
|
python3dist(more-itertools) >= 4 |
||||||
|
result: 0 |
||||||
|
|
||||||
|
Run dependencies with extras (selected): |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
pyyaml: 1 |
||||||
|
include_runtime: true |
||||||
|
extras: |
||||||
|
- testing |
||||||
|
setup.py: *pytest_setup_py |
||||||
|
expected: | |
||||||
|
python3dist(setuptools) >= 40.8 |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(setuptools) >= 40 |
||||||
|
python3dist(py) >= 1.5 |
||||||
|
python3dist(six) >= 1.10 |
||||||
|
python3dist(setuptools) |
||||||
|
python3dist(attrs) >= 17.4 |
||||||
|
python3dist(atomicwrites) >= 1 |
||||||
|
python3dist(pluggy) >= 0.11 |
||||||
|
python3dist(more-itertools) >= 4 |
||||||
|
python3dist(argcomplete) |
||||||
|
python3dist(hypothesis) >= 3.56 |
||||||
|
python3dist(nose) |
||||||
|
python3dist(requests) |
||||||
|
result: 0 |
||||||
|
|
||||||
|
Run dependencies with multiple extras: |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
pyyaml: 1 |
||||||
|
include_runtime: true |
||||||
|
generate_extras: true |
||||||
|
extras: |
||||||
|
- testing,more-testing |
||||||
|
- even-more-testing , cool-feature |
||||||
|
setup.py: | |
||||||
|
from setuptools import setup |
||||||
|
setup( |
||||||
|
extras_require={ |
||||||
|
'testing': ['dep1'], |
||||||
|
'more-testing': ['dep2'], |
||||||
|
'even-more-testing': ['dep3'], |
||||||
|
'cool-feature': ['dep4[FOO,BAR]'], |
||||||
|
}, |
||||||
|
) |
||||||
|
expected: | |
||||||
|
python3dist(setuptools) >= 40.8 |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(dep4) |
||||||
|
python3dist(dep4[bar]) |
||||||
|
python3dist(dep4[foo]) |
||||||
|
python3dist(dep3) |
||||||
|
python3dist(dep2) |
||||||
|
python3dist(dep1) |
||||||
|
result: 0 |
||||||
|
|
||||||
|
Run dependencies with extras and build wheel option: |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
pyyaml: 1 |
||||||
|
include_runtime: true |
||||||
|
build_wheel: true |
||||||
|
extras: |
||||||
|
- testing |
||||||
|
setup.py: *pytest_setup_py |
||||||
|
expected: | |
||||||
|
python3dist(setuptools) >= 40.8 |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(setuptools) >= 40 |
||||||
|
python3dist(py) >= 1.5 |
||||||
|
python3dist(six) >= 1.10 |
||||||
|
python3dist(setuptools) |
||||||
|
python3dist(attrs) >= 17.4 |
||||||
|
python3dist(atomicwrites) >= 1 |
||||||
|
python3dist(pluggy) >= 0.11 |
||||||
|
python3dist(more-itertools) >= 4 |
||||||
|
python3dist(argcomplete) |
||||||
|
python3dist(hypothesis) >= 3.56 |
||||||
|
python3dist(nose) |
||||||
|
python3dist(requests) |
||||||
|
result: 0 |
||||||
|
stderr_contains: "Reading metadata from {wheeldir}/pytest-6.6.6-py3-none-any.whl" |
||||||
|
|
||||||
|
Tox dependencies: |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
tox: 3.5.3 |
||||||
|
tox-current-env: 0.0.6 |
||||||
|
toxenv: |
||||||
|
- py3 |
||||||
|
setup.py: | |
||||||
|
from setuptools import setup |
||||||
|
setup( |
||||||
|
name='test', |
||||||
|
version='0.1', |
||||||
|
install_requires=['inst'], |
||||||
|
) |
||||||
|
tox.ini: | |
||||||
|
[tox] |
||||||
|
envlist = py36,py37,py38 |
||||||
|
[testenv] |
||||||
|
deps = |
||||||
|
toxdep1 |
||||||
|
toxdep2 |
||||||
|
commands = |
||||||
|
true |
||||||
|
expected: | |
||||||
|
python3dist(setuptools) >= 40.8 |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(tox-current-env) >= 0.0.6 |
||||||
|
python3dist(toxdep1) |
||||||
|
python3dist(toxdep2) |
||||||
|
python3dist(inst) |
||||||
|
result: 0 |
||||||
|
|
||||||
|
Tox extras: |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
tox: 3.5.3 |
||||||
|
tox-current-env: 0.0.6 |
||||||
|
generate_extras: true |
||||||
|
toxenv: |
||||||
|
- py3 |
||||||
|
setup.py: | |
||||||
|
from setuptools import setup |
||||||
|
setup( |
||||||
|
name='test', |
||||||
|
version='0.1', |
||||||
|
install_requires=['inst'], |
||||||
|
extras_require={ |
||||||
|
'extra1': ['dep11 > 11', 'dep12'], |
||||||
|
'extra2': ['dep21', 'dep22', 'dep23', 'extra_dep[EXTRA_DEP]'], |
||||||
|
'nope': ['nopedep'], |
||||||
|
} |
||||||
|
) |
||||||
|
tox.ini: | |
||||||
|
[tox] |
||||||
|
envlist = py36,py37,py38 |
||||||
|
[testenv] |
||||||
|
deps = |
||||||
|
toxdep |
||||||
|
extras = |
||||||
|
extra2 |
||||||
|
extra1 |
||||||
|
commands = |
||||||
|
true |
||||||
|
expected: | |
||||||
|
python3dist(setuptools) >= 40.8 |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(tox-current-env) >= 0.0.6 |
||||||
|
python3dist(toxdep) |
||||||
|
python3dist(inst) |
||||||
|
python3dist(dep11) > 11.0 |
||||||
|
python3dist(dep12) |
||||||
|
python3dist(dep21) |
||||||
|
python3dist(dep22) |
||||||
|
python3dist(dep23) |
||||||
|
python3dist(extra-dep) |
||||||
|
python3dist(extra-dep[extra_dep]) |
||||||
|
result: 0 |
||||||
|
|
||||||
|
Tox provision unsatisfied: |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
tox: 3.5.3 |
||||||
|
tox-current-env: 0.0.6 |
||||||
|
toxenv: |
||||||
|
- py3 |
||||||
|
setup.py: | |
||||||
|
from setuptools import setup |
||||||
|
setup( |
||||||
|
name='test', |
||||||
|
version='0.1', |
||||||
|
install_requires=['inst'], |
||||||
|
) |
||||||
|
tox.ini: | |
||||||
|
[tox] |
||||||
|
minversion = 3.999 |
||||||
|
requires = |
||||||
|
setuptools > 40 |
||||||
|
wheel > 2 |
||||||
|
[testenv] |
||||||
|
deps = |
||||||
|
toxdep1 |
||||||
|
toxdep2 |
||||||
|
expected: | |
||||||
|
python3dist(setuptools) >= 40.8 |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(tox-current-env) >= 0.0.6 |
||||||
|
python3dist(tox) >= 3.999 |
||||||
|
python3dist(setuptools) > 40.0 |
||||||
|
python3dist(wheel) > 2.0 |
||||||
|
result: 0 |
||||||
|
|
||||||
|
Tox provision satisfied: |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
tox: 3.5.3 |
||||||
|
tox-current-env: 0.0.6 |
||||||
|
toxenv: |
||||||
|
- py3 |
||||||
|
setup.py: | |
||||||
|
from setuptools import setup |
||||||
|
setup( |
||||||
|
name='test', |
||||||
|
version='0.1', |
||||||
|
install_requires=['inst'], |
||||||
|
) |
||||||
|
tox.ini: | |
||||||
|
[tox] |
||||||
|
minversion = 3.5 |
||||||
|
requires = |
||||||
|
setuptools > 40 |
||||||
|
[testenv] |
||||||
|
deps = |
||||||
|
toxdep1 |
||||||
|
toxdep2 |
||||||
|
expected: | |
||||||
|
python3dist(setuptools) >= 40.8 |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(tox-current-env) >= 0.0.6 |
||||||
|
python3dist(tox) >= 3.5 |
||||||
|
python3dist(setuptools) > 40.0 |
||||||
|
python3dist(toxdep1) |
||||||
|
python3dist(toxdep2) |
||||||
|
python3dist(inst) |
||||||
|
result: 0 |
||||||
|
|
||||||
|
Default build system, unmet deps in requirements file: |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
setup.py: | |
||||||
|
from setuptools import setup |
||||||
|
setup( |
||||||
|
name='test', |
||||||
|
version='0.1', |
||||||
|
) |
||||||
|
requirements.txt: | |
||||||
|
lxml |
||||||
|
ncclient |
||||||
|
cryptography |
||||||
|
paramiko |
||||||
|
SQLAlchemy |
||||||
|
requirement_files: |
||||||
|
- requirements.txt |
||||||
|
expected: | |
||||||
|
python3dist(lxml) |
||||||
|
python3dist(ncclient) |
||||||
|
python3dist(cryptography) |
||||||
|
python3dist(paramiko) |
||||||
|
python3dist(sqlalchemy) |
||||||
|
result: 0 |
||||||
|
|
||||||
|
Default build system, met deps in requirements file: |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
lxml: 3.9 |
||||||
|
ncclient: 1 |
||||||
|
cryptography: 2 |
||||||
|
paramiko: 1 |
||||||
|
SQLAlchemy: 1.0.90 |
||||||
|
setup.py: | |
||||||
|
from setuptools import setup |
||||||
|
setup( |
||||||
|
name='test', |
||||||
|
version='0.1', |
||||||
|
) |
||||||
|
requirements.txt: | |
||||||
|
lxml!=3.7.0,>=2.3 # OF-Config |
||||||
|
ncclient # OF-Config |
||||||
|
cryptography!=1.5.2 # Required by paramiko |
||||||
|
paramiko # NETCONF, BGP speaker (SSH console) |
||||||
|
SQLAlchemy>=1.0.10,<1.1.0 # Zebra protocol service |
||||||
|
requirement_files: |
||||||
|
- requirements.txt |
||||||
|
expected: | |
||||||
|
((python3dist(lxml) < 3.7 or python3dist(lxml) > 3.7) with python3dist(lxml) >= 2.3) |
||||||
|
python3dist(ncclient) |
||||||
|
(python3dist(cryptography) < 1.5.2 or python3dist(cryptography) > 1.5.2) |
||||||
|
python3dist(paramiko) |
||||||
|
(python3dist(sqlalchemy) < 1.1~~ with python3dist(sqlalchemy) >= 1.0.10) |
||||||
|
python3dist(setuptools) >= 40.8 |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(wheel) |
||||||
|
result: 0 |
||||||
|
|
||||||
|
With pyproject.toml, requirements file and with -N option: |
||||||
|
use_build_system: false |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
toml: 1 |
||||||
|
lxml: 3.9 |
||||||
|
ncclient: 1 |
||||||
|
cryptography: 2 |
||||||
|
paramiko: 1 |
||||||
|
SQLAlchemy: 1.0.90 |
||||||
|
pyproject.toml: | |
||||||
|
[build-system] |
||||||
|
requires = [ |
||||||
|
"foo", |
||||||
|
] |
||||||
|
build-backend = "foo.build" |
||||||
|
requirements.txt: | |
||||||
|
lxml |
||||||
|
ncclient |
||||||
|
cryptography |
||||||
|
paramiko |
||||||
|
SQLAlchemy |
||||||
|
git+https://github.com/monty/spam.git@master#egg=spam |
||||||
|
requirement_files: |
||||||
|
- requirements.txt |
||||||
|
expected: | |
||||||
|
python3dist(lxml) |
||||||
|
python3dist(ncclient) |
||||||
|
python3dist(cryptography) |
||||||
|
python3dist(paramiko) |
||||||
|
python3dist(sqlalchemy) |
||||||
|
python3dist(spam) |
||||||
|
stderr_contains: "WARNING: Simplifying 'spam@git+https://github.com/monty/spam.git@master#egg=spam' to 'spam'." |
||||||
|
result: 0 |
||||||
|
|
||||||
|
With pyproject.toml, requirements file and without -N option: |
||||||
|
use_build_system: true |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
toml: 1 |
||||||
|
lxml: 3.9 |
||||||
|
ncclient: 1 |
||||||
|
cryptography: 2 |
||||||
|
paramiko: 1 |
||||||
|
SQLAlchemy: 1.0.90 |
||||||
|
argcomplete: 1 |
||||||
|
hypothesis: 1 |
||||||
|
pyproject.toml: | |
||||||
|
[build-system] |
||||||
|
requires = [ |
||||||
|
"foo", |
||||||
|
] |
||||||
|
build-backend = "foo.build" |
||||||
|
requirements.txt: | |
||||||
|
lxml |
||||||
|
ncclient |
||||||
|
cryptography |
||||||
|
paramiko |
||||||
|
SQLAlchemy |
||||||
|
requirements1.in: | |
||||||
|
argcomplete |
||||||
|
hypothesis |
||||||
|
requirement_files: |
||||||
|
- requirements.txt |
||||||
|
- requirements1.in |
||||||
|
expected: | |
||||||
|
python3dist(lxml) |
||||||
|
python3dist(ncclient) |
||||||
|
python3dist(cryptography) |
||||||
|
python3dist(paramiko) |
||||||
|
python3dist(sqlalchemy) |
||||||
|
python3dist(argcomplete) |
||||||
|
python3dist(hypothesis) |
||||||
|
python3dist(foo) |
||||||
|
result: 0 |
||||||
|
|
||||||
|
Value error if -N and -r arguments are present: |
||||||
|
installed: |
||||||
|
# empty |
||||||
|
include_runtime: true |
||||||
|
use_build_system: false |
||||||
|
except: ValueError |
||||||
|
|
||||||
|
Value error if -N and -e arguments are present: |
||||||
|
installed: |
||||||
|
# empty |
||||||
|
toxenv: |
||||||
|
- py3 |
||||||
|
use_build_system: false |
||||||
|
except: ValueError |
||||||
|
|
||||||
|
Weird and complex requirements file: |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
setup.py: | |
||||||
|
from setuptools import setup |
||||||
|
setup( |
||||||
|
name='test', |
||||||
|
version='0.1', |
||||||
|
) |
||||||
|
requirements.txt: | |
||||||
|
Normal_Req ~= 1.2.0 |
||||||
|
|
||||||
|
good@git+https://github.com/monty/spam.git@master#egg=bad |
||||||
|
git+https://github.com/monty/spam.git@master#egg=ugly |
||||||
|
|
||||||
|
this-name-is-too-\ |
||||||
|
long-for-this-file<\ |
||||||
|
=30 # even names and operators can be split |
||||||
|
|
||||||
|
# this is not a multi-line comment \ |
||||||
|
some-dep |
||||||
|
other-dep # but this *is* a multi-line comment \
||||||
|
so any garbage can be here |
||||||
|
dep-a # and this comment ends with the blank line below \ |
||||||
|
|
||||||
|
dep-b |
||||||
|
-r requirements2.txt |
||||||
|
${PACKAGE}${WANTED_VERSION} |
||||||
|
requirements2.txt: | |
||||||
|
dep-from-included-file |
||||||
|
requirement_files: |
||||||
|
- requirements.txt |
||||||
|
environ: |
||||||
|
PACKAGE: package |
||||||
|
WANTED_VERSION: -from-environ >= 1.2.3 |
||||||
|
expected: | |
||||||
|
(python3dist(normal-req) >= 1.2 with python3dist(normal-req) < 1.3) |
||||||
|
python3dist(good) |
||||||
|
python3dist(ugly) |
||||||
|
python3dist(this-name-is-too-long-for-this-file) <= 30 |
||||||
|
python3dist(some-dep) |
||||||
|
python3dist(other-dep) |
||||||
|
python3dist(dep-a) |
||||||
|
python3dist(dep-b) |
||||||
|
python3dist(dep-from-included-file) |
||||||
|
python3dist(package-from-environ) >= 1.2.3 |
||||||
|
stderr_contains: |
||||||
|
- "WARNING: Simplifying 'good@git+https://github.com/monty/spam.git@master#egg=bad' to 'good'." |
||||||
|
# XXX: pyproject_requirements_txt adds a prefix that's not actually in the source; |
||||||
|
# but that's good enough: |
||||||
|
- "WARNING: Simplifying 'ugly@git+https://github.com/monty/spam.git@master#egg=ugly' to 'ugly'." |
||||||
|
result: 0 |
||||||
|
|
||||||
|
|
||||||
|
Pre-releases are accepted: |
||||||
|
installed: |
||||||
|
setuptools: 50 |
||||||
|
wheel: 1 |
||||||
|
toml: 1 |
||||||
|
cffi: 1.15.0rc2 |
||||||
|
pyproject.toml: | |
||||||
|
[build-system] |
||||||
|
requires = [ |
||||||
|
"setuptools", |
||||||
|
"wheel", |
||||||
|
"cffi", |
||||||
|
] |
||||||
|
build-backend = "setuptools.build_meta" |
||||||
|
expected: | |
||||||
|
python3dist(setuptools) |
||||||
|
python3dist(wheel) |
||||||
|
python3dist(cffi) |
||||||
|
python3dist(wheel) |
||||||
|
stderr_contains: "Requirement satisfied: cffi" |
||||||
|
result: 0 |
@ -0,0 +1,15 @@ |
|||||||
|
import argparse |
||||||
|
import sys |
||||||
|
|
||||||
|
|
||||||
|
def main(argv):
    """Collect every -e/--toxenv value from argv and join them with commas.

    All other arguments are silently ignored (parse_known_args), matching
    RPM's getopt-style option handling.  Returns the comma-joined string.
    """
    parser = argparse.ArgumentParser(
        description='Parse -e arguments instead of RPM getopt.'
    )
    parser.add_argument('-e', '--toxenv', action='append')
    known, _ignored = parser.parse_known_args(argv)
    return ','.join(known.toxenv)
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # CLI entry point: print the comma-joined -e/--toxenv values.
    # NOTE(review): main() raises TypeError if no -e was passed
    # (args.toxenv stays None) -- presumably callers always pass one.
    print(main(sys.argv[1:]))
@ -0,0 +1,171 @@ |
|||||||
|
# Copyright 2019 Gordon Messmer <gordon.messmer@gmail.com> |
||||||
|
# |
||||||
|
# Upstream: https://github.com/gordonmessmer/pyreq2rpm |
||||||
|
# |
||||||
|
# Permission is hereby granted, free of charge, to any person |
||||||
|
# obtaining a copy of this software and associated documentation files |
||||||
|
# (the "Software"), to deal in the Software without restriction, |
||||||
|
# including without limitation the rights to use, copy, modify, merge, |
||||||
|
# publish, distribute, sublicense, and/or sell copies of the Software, |
||||||
|
# and to permit persons to whom the Software is furnished to do so, |
||||||
|
# subject to the following conditions: |
||||||
|
# |
||||||
|
# The above copyright notice and this permission notice shall be |
||||||
|
# included in all copies or substantial portions of the Software. |
||||||
|
# |
||||||
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, |
||||||
|
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
||||||
|
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND |
||||||
|
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS |
||||||
|
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN |
||||||
|
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN |
||||||
|
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
||||||
|
# SOFTWARE. |
||||||
|
|
||||||
|
from packaging.requirements import Requirement |
||||||
|
from packaging.version import parse as parse_version |
||||||
|
|
||||||
|
class RpmVersion():
    """Translate a PEP 440 version into its RPM representation.

    NOTE(review): this reads the private ``_version`` attribute of the
    object returned by ``packaging.version.parse`` -- it may break with
    newer ``packaging`` releases; verify on upgrade.
    """
    def __init__(self, version_id):
        version = parse_version(version_id)
        if isinstance(version._version, str):
            # Legacy (non-PEP 440) version: kept as the raw string
            self.version = version._version
        else:
            self.epoch = version._version.epoch
            self.version = list(version._version.release)
            self.pre = version._version.pre
            self.dev = version._version.dev
            self.post = version._version.post
            # version.local is ignored as it is not expected to appear
            # in public releases
            # https://www.python.org/dev/peps/pep-0440/#local-version-identifiers

    def is_legacy(self):
        # Legacy versions are stored as a plain string (see __init__)
        return isinstance(self.version, str)

    def increment(self):
        # Bump the last release component and drop pre/dev/post markers;
        # used to build exclusive upper bounds.  Mutates self and
        # returns it so calls can be chained.
        self.version[-1] += 1
        self.pre = None
        self.dev = None
        self.post = None
        return self

    def __str__(self):
        # Render as an RPM version string.  RPM's `~` sorts before the
        # base version and `^` after it, which is how PEP 440
        # pre/dev/post ordering is emulated.
        if self.is_legacy():
            return self.version
        if self.epoch:
            rpm_epoch = str(self.epoch) + ':'
        else:
            rpm_epoch = ''
        # Trailing zero components are redundant in PEP 440 (1.1.0 == 1.1);
        # NOTE: this loop mutates self.version in place.
        while len(self.version) > 1 and self.version[-1] == 0:
            self.version.pop()
        rpm_version = '.'.join(str(x) for x in self.version)
        if self.pre:
            rpm_suffix = '~{}'.format(''.join(str(x) for x in self.pre))
        elif self.dev:
            rpm_suffix = '~~{}'.format(''.join(str(x) for x in self.dev))
        elif self.post:
            rpm_suffix = '^post{}'.format(self.post[1])
        else:
            rpm_suffix = ''
        return '{}{}{}'.format(rpm_epoch, rpm_version, rpm_suffix)
||||||
|
|
||||||
|
def convert_compatible(name, operator, version_id):
    """Convert a `~=` (compatible release) clause to an RPM range.

    Lower bound is inclusive; the exclusive upper bound is obtained by
    dropping the last release component and incrementing the new last one.
    Prefix matches, legacy versions and single-component versions are
    invalid for `~=`.
    """
    if version_id.endswith('.*'):
        return 'Invalid version'
    lower = RpmVersion(version_id)
    if lower.is_legacy():
        # LegacyVersions are not supported in this context
        return 'Invalid version'
    if len(lower.version) == 1:
        return 'Invalid version'
    upper = RpmVersion(version_id)
    upper.version.pop()
    upper.increment()
    return '({} >= {} with {} < {})'.format(name, lower, name, upper)
||||||
|
|
||||||
|
def convert_equal(name, operator, version_id):
    """Convert `==`; a `== X.*` prefix match behaves like `~= X.0`."""
    if version_id.endswith('.*'):
        # Delegate prefix matching to the compatible-release conversion
        return convert_compatible(name, '~=', version_id[:-2] + '.0')
    return '{} = {}'.format(name, RpmVersion(version_id))
||||||
|
|
||||||
|
def convert_arbitrary_equal(name, operator, version_id):
    """Convert `===` (arbitrary equality); prefix matches are invalid."""
    if version_id.endswith('.*'):
        return 'Invalid version'
    return '{} = {}'.format(name, RpmVersion(version_id))
||||||
|
|
||||||
|
def convert_not_equal(name, operator, version_id):
    """Convert `!=` into an RPM `(< lower or >/>= upper)` disjunction.

    `!= X.*` excludes the whole X prefix: anything below `X~~` (the `~~`
    keeps dev/pre-releases of X out of the `<` side) or at least X+1.
    A plain `!= X` excludes exactly X.
    """
    if version_id.endswith('.*'):
        trimmed = version_id[:-2]
        excluded = RpmVersion(trimmed)
        if excluded.is_legacy():
            # LegacyVersions are not supported in this context
            return 'Invalid version'
        upper = RpmVersion(trimmed).increment()
        upper_op = '>='
        # Prevent dev and pre-releases from satisfying the < side
        lower = '{}~~'.format(excluded)
    else:
        excluded = RpmVersion(version_id)
        lower = excluded
        upper = excluded
        upper_op = '>'
    return '({} < {} or {} {} {})'.format(name, lower, name, upper_op, upper)
||||||
|
|
||||||
|
def convert_ordered(name, operator, version_id):
    """Convert ordered comparisons (`<`, `<=`, `>`, `>=`) to RPM.

    Adjusts the operator and/or version so RPM comparison semantics
    match PEP 440 for prefix matches and pre/dev/post releases.
    """
    if version_id.endswith('.*'):
        # PEP 440 does not define semantics for prefix matching
        # with ordered comparisons
        # see: https://github.com/pypa/packaging/issues/320
        # and: https://github.com/pypa/packaging/issues/321
        # This style of specifier is officially "unsupported",
        # even though it is processed. Support may be removed
        # in version 21.0.
        version_id = version_id[:-2]
        version = RpmVersion(version_id)
        if operator == '>':
            # distutils will allow a prefix match with '>'
            operator = '>='
        if operator == '<=':
            # distutils will not allow a prefix match with '<='
            operator = '<'
    else:
        version = RpmVersion(version_id)
        # For backwards compatibility, fallback to previous behavior with LegacyVersions
        if not version.is_legacy():
            # Prevent dev and pre-releases from satisfying a < requirement
            # (RPM's `~~` sorts below any real release of the same version)
            if operator == '<' and not version.pre and not version.dev and not version.post:
                version = '{}~~'.format(version)
            # Prevent post-releases from satisfying a > requirement
            if operator == '>' and not version.pre and not version.dev and not version.post:
                version = '{}.0'.format(version)
    return '{} {} {}'.format(name, operator, version)
||||||
|
|
||||||
|
# Dispatch table: PEP 440 comparison operator -> conversion function.
OPERATORS = {'~=': convert_compatible,
             '==': convert_equal,
             '===': convert_arbitrary_equal,
             '!=': convert_not_equal,
             '<=': convert_ordered,
             '<': convert_ordered,
             '>=': convert_ordered,
             '>': convert_ordered}
||||||
|
|
||||||
|
def convert(name, operator, version_id):
    """Dispatch one specifier to the converter registered for its operator."""
    handler = OPERATORS[operator]
    return handler(name, operator, version_id)
||||||
|
|
||||||
|
def convert_requirement(req):
    """Convert a PEP 508 requirement string to an RPM dependency string.

    Bug fix: the original used ``Requirement.parse(req)`` together with
    ``.project_name`` and ``.specs`` -- that is the ``pkg_resources``
    API, but this module imports ``packaging.requirements.Requirement``,
    which has none of those attributes, so every call raised
    AttributeError.  This version uses the ``packaging`` API:
    ``Requirement(req)``, ``.name`` and ``.specifier``.
    """
    parsed_req = Requirement(req)
    # One RPM clause per version specifier
    reqs = [convert(parsed_req.name, spec.operator, spec.version)
            for spec in parsed_req.specifier]
    if len(reqs) == 0:
        # No version restriction: just the (virtual) provide name
        return parsed_req.name
    if len(reqs) == 1:
        return reqs[0]
    # Sort for deterministic output, then combine with RPM's boolean `with`
    reqs.sort()
    return '({})'.format(' with '.join(reqs))
@ -0,0 +1,85 @@ |
|||||||
|
import argparse |
||||||
|
import csv |
||||||
|
import json |
||||||
|
import os |
||||||
|
from pathlib import PosixPath |
||||||
|
|
||||||
|
from pyproject_save_files import BuildrootPath |
||||||
|
|
||||||
|
|
||||||
|
def read_record(record_path):
    """Yield individual RECORD triplets: [str-path, hash, size].

    https://www.python.org/dev/peps/pep-0376/#record

    The hash and size fields may be empty strings; callers typically use
    only the path.  RECORD is CSV, so quoting of paths that contain
    commas is handled by csv.reader.

    Example:

        >>> g = read_record(PosixPath('./test_RECORD'))
        >>> next(g)
        ['../../../bin/__pycache__/tldr.cpython-....pyc', '', '']
    """
    with open(record_path, newline="", encoding="utf-8") as record_file:
        reader = csv.reader(
            record_file, delimiter=",", quotechar='"', lineterminator=os.linesep
        )
        for triplet in reader:
            yield triplet
||||||
|
|
||||||
|
|
||||||
|
def parse_record(record_path, record_content):
    """Return a list of str buildroot paths parsed from record_content.

    params:
      record_path: BuildrootPath of the RECORD file itself
      record_content: iterable of RECORD triplets; only the first field
        is used -- a path relative to the directory containing the
        dist-info directory (may also be absolute, though not from pip)

    The site directory is two levels up from RECORD (through dist-info).
    The `/` operator with an absolute right operand discards the left
    side, and normpath() collapses any `..` parts without touching disk.
    """
    sitedir = record_path.parent.parent
    resolved = []
    for triplet in record_content:
        resolved.append(str((sitedir / triplet[0]).normpath()))
    return resolved
||||||
|
|
||||||
|
|
||||||
|
def save_parsed_record(record_path, parsed_record, output_file):
    """Merge {record_path: parsed_record} into the JSON file output_file.

    Existing entries for other RECORDs are preserved; an entry for the
    same RECORD path is overwritten.  The file is created if missing.
    """
    if output_file.is_file():
        content = json.loads(output_file.read_text())
    else:
        content = {}
    content[str(record_path)] = parsed_record
    output_file.write_text(json.dumps(content))
||||||
|
|
||||||
|
|
||||||
|
def main(cli_args):
    # Translate the on-disk RECORD path into a buildroot-relative path,
    # resolve its entries to absolute buildroot paths, and merge the
    # result into the JSON output file.
    record_path = BuildrootPath.from_real(cli_args.record, root=cli_args.buildroot)
    parsed_record = parse_record(record_path, read_record(cli_args.record))
    save_parsed_record(record_path, parsed_record, cli_args.output)
||||||
|
|
||||||
|
|
||||||
|
def argparser():
    """Build the CLI parser: --buildroot, --record and --output,
    all required and all converted to PosixPath."""
    parser = argparse.ArgumentParser()
    required = parser.add_argument_group("required arguments")
    for option in ("--buildroot", "--record", "--output"):
        required.add_argument(option, type=PosixPath, required=True)
    return parser
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # CLI entry point: parse arguments and run the record preprocessing.
    cli_args = argparser().parse_args()
    main(cli_args)
@ -0,0 +1,103 @@ |
|||||||
|
"""Best-effort parser for requirements.txt files""" |
||||||
|
|
||||||
|
import urllib.parse |
||||||
|
from pathlib import Path |
||||||
|
import sys |
||||||
|
import os |
||||||
|
import re |
||||||
|
|
||||||
|
# `#` starts a comment only at end of line and after whitespace
COMMENT_RE = re.compile(r'(^|\s+)#.*$')

# Assume URLs start with a scheme; don't look for "egg=" URLs otherwise
URL_START_RE = re.compile(r'^[-_+a-zA-Z0-9]+://')

# pip-style ${VAR} environment variable reference: uppercase letters,
# digits and underscores only; `var` captures the whole ${...} token
ENV_VAR_RE = re.compile(r'(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})')
# Leading run of characters that may start a package name
PKGNAME_RE = re.compile(r'^[-_a-zA-Z0-9]+')
||||||
|
|
||||||
|
# The requirements.txt format evolved rather organically; expect weirdness. |
||||||
|
|
||||||
|
def convert_requirements_txt(lines, path: Path = None):
    """Convert lines of a requirements file to PEP 440-style requirement strs

    This does NOT handle all of requirements.txt features (only pip can do
    that), but tries its best.

    The resulting requirements might not actually be valid (either because
    they're wrong in the file, or because we missed a special case).

    path is the path to the requirements.txt file, used for options like
    `-r` and for error messages.
    """
    requirements = []
    lines = combine_logical_lines(lines)
    lines = strip_comments(lines)
    lines = expand_env_vars(lines)
    # filename identifies this file in error messages
    if path:
        filename = path.name
    else:
        filename = '<requirements file>'
    for line in lines:
        if URL_START_RE.match(line):
            # Handle URLs with "egg=..." fragments
            # see https://pip.pypa.io/en/stable/cli/pip_install/#vcs-support
            parsed_url = urllib.parse.urlparse(line)
            parsed_fragment = urllib.parse.parse_qs(parsed_url.fragment)
            if 'egg' in parsed_fragment:
                # Prepend the package name to the URL.
                match = PKGNAME_RE.match(parsed_fragment['egg'][0])
                if match:
                    pkg_name = match[0]
                    requirements.append(f'{pkg_name}@{line}')
                    continue
            # If that didn't work, pass the line on;
            # the caller will deal with invalid requirements
            requirements.append(line)
        elif line.startswith('-r'):
            # Recurse into an included requirements file, relative to this one
            recursed_path = line[2:].strip()
            if path:
                recursed_path = path.parent / recursed_path
            recursed_path = Path(recursed_path)
            with recursed_path.open() as f:
                requirements.extend(convert_requirements_txt(f, recursed_path))
        elif line.startswith('-'):
            # Bug fix: the message previously hard-coded "(unknown)" even
            # though `filename` was computed above for exactly this purpose.
            raise ValueError(f'{filename}: unsupported requirements file option: {line}')
        else:
            requirements.append(line)
    return requirements
||||||
|
|
||||||
|
def combine_logical_lines(lines):
    """Combine logical lines together (backslash line-continuation).

    Whole-line comments are dropped *before* continuation handling, so a
    comment ending in a backslash does not continue anything.  Trailing
    whitespace is stripped from complete logical lines only (a
    continuation piece keeps its spaces).  A final -- possibly empty --
    logical line is always yielded.
    """
    buffered = []
    for raw in lines:
        raw = raw.rstrip('\n')
        # Whole-line comments *only* are removed before line-continuation
        if COMMENT_RE.match(raw):
            continue
        if raw.endswith('\\'):
            buffered.append(raw[:-1])
            continue
        buffered.append(raw.rstrip())
        yield ''.join(buffered)
        buffered = []
    yield ''.join(buffered)
||||||
|
|
||||||
|
|
||||||
|
def strip_comments(lines):
    """Drop end-of-line comments and yield only non-empty stripped lines."""
    for line in lines:
        # split() returns the text before the comment as element 0
        code = COMMENT_RE.split(line, maxsplit=1)[0].strip()
        if code:
            yield code
||||||
|
|
||||||
|
|
||||||
|
def expand_env_vars(lines):
    """Expand ${VAR}-style environment variable references in each line.

    Every input line is yielded exactly once; references to variables
    that are not set in the environment are left in place verbatim.

    Bug fix: lines were only yielded when ENV_VAR_RE found a match, so
    every line *without* an environment-variable reference was silently
    dropped.  (The unused `var` local was removed as well.)
    """
    def repl(match):
        value = os.getenv(match['name'])
        if value is None:
            # Unset variable: keep the literal ${NAME} text
            return match['var']
        return value

    for line in lines:
        yield ENV_VAR_RE.sub(repl, line)
@ -0,0 +1,675 @@ |
|||||||
|
import argparse |
||||||
|
import fnmatch |
||||||
|
import json |
||||||
|
import os |
||||||
|
|
||||||
|
from collections import defaultdict |
||||||
|
from keyword import iskeyword |
||||||
|
from pathlib import PosixPath, PurePosixPath |
||||||
|
from importlib.metadata import Distribution |
||||||
|
|
||||||
|
|
||||||
|
# From RPM's build/files.c strtokWithQuotes delim argument
# ("DELIMETERS" spelling kept as-is (sic) -- it is part of this module's API)
RPM_FILES_DELIMETERS = ' \n\t'

# RPM hardcodes the lists of manpage extensions and directories,
# so we have to maintain separate ones :(
# There is an issue for RPM to provide the lists as macros:
# https://github.com/rpm-software-management/rpm/issues/1865
# The original lists can be found here:
# https://github.com/rpm-software-management/rpm/blob/master/scripts/brp-compress
MANPAGE_EXTENSIONS = ['gz', 'Z', 'bz2', 'xz', 'lzma', 'zst', 'zstd']
# Glob patterns (relative to a prefix such as /usr) that brp-compress
# treats as man/info page directories
MANDIRS = [
    '/man/man*',
    '/man/*/man*',
    '/info',
    '/share/man/man*',
    '/share/man/*/man*',
    '/share/info',
    '/kerberos/man',
    '/X11R6/man/man*',
    '/lib/perl5/man/man*',
    '/share/doc/*/man/man*',
    '/lib/*/man/man*',
    '/share/fish/man/man*',
]
||||||
|
|
||||||
|
|
||||||
|
class BuildrootPath(PurePosixPath):
    """A pure POSIX path interpreted relative to some buildroot.

    When absolute, /usr/lib stands for %{buildroot}/usr/lib.  The object
    itself carries no buildroot information.
    """

    @staticmethod
    def from_real(realpath, *, root):
        """Turn a real on-disk path under `root` into a BuildrootPath.

        >>> BuildrootPath.from_real(PosixPath('/tmp/buildroot/foo'), root=PosixPath('/tmp/buildroot'))
        BuildrootPath('/foo')
        """
        relative = realpath.relative_to(root)
        return BuildrootPath("/") / relative

    def to_real(self, root):
        """Turn this path back into a real PosixPath inside `root`.

        >>> BuildrootPath('/foo').to_real(PosixPath('/tmp/buildroot'))
        PosixPath('/tmp/buildroot/foo')
        """
        return root / self.relative_to("/")

    def normpath(self):
        """Collapse any /../ components without touching real files.

        PurePaths have no .resolve(), and Path.resolve() inspects the
        filesystem; this alternative assumes no symbolic links.

        >>> BuildrootPath('/usr/lib/python/../pypy').normpath()
        BuildrootPath('/usr/lib/pypy')
        """
        return type(self)(os.path.normpath(self))
||||||
|
|
||||||
|
|
||||||
|
def pycache_dir(script):
    """Return the __pycache__ directory that sits next to `script`.

    >>> pycache_dir(BuildrootPath('/whatever/bar.py'))
    BuildrootPath('/whatever/__pycache__')
    """
    return script.parent / "__pycache__"
||||||
|
|
||||||
|
|
||||||
|
def pycached(script, python_version):
    """Return [script, bytecode-glob] for a .py path, like the %pycached macro.

    The glob (a path object, not a real file) matches both the plain and
    the optimized pyc file:

    >>> pycached(BuildrootPath('/whatever/bar.py'), '3.8')
    [BuildrootPath('/whatever/bar.py'), BuildrootPath('/whatever/__pycache__/bar.cpython-38{,.opt-?}.pyc')]
    """
    assert script.suffix == ".py"
    # "3.10" -> "310": only major and minor appear in pyc file names
    pyver = "".join(python_version.split(".")[:2])
    pyc_name = "{}.cpython-{}{{,.opt-?}}.pyc".format(script.stem, pyver)
    return [script, script.parent / "__pycache__" / pyc_name]
||||||
|
|
||||||
|
|
||||||
|
def add_file_to_module(paths, module_name, module_type, files_dirs, *files):
    """Record `files` under the module's entry of the given type.

    paths["modules"][module_name] is a list of dicts, one per module
    type; `files_dirs` selects the "files" or "dirs" list inside it.
    If an entry of module_type exists and does not yet contain the first
    of `files`, all of `files` are appended; with no matching entry a
    new one is created.
    """
    entries = paths["modules"][module_name]
    for entry in entries:
        if entry["type"] != module_type:
            continue
        if files[0] not in entry[files_dirs]:
            entry[files_dirs].extend(files)
        break
    else:
        new_entry = {"type": module_type, "files": [], "dirs": []}
        new_entry[files_dirs] = list(files)
        entries.append(new_entry)
||||||
|
|
||||||
|
|
||||||
|
def add_py_file_to_module(paths, module_name, module_type, path, python_version,
                          *, include_pycache_dir):
    """Add a .py file (and always its bytecode glob) to the module entry.

    When include_pycache_dir is set, the __pycache__ directory itself is
    recorded under "dirs" as well.
    """
    script_and_bytecode = pycached(path, python_version)
    add_file_to_module(paths, module_name, module_type, "files", *script_and_bytecode)
    if include_pycache_dir:
        add_file_to_module(paths, module_name, module_type, "dirs", pycache_dir(path))
||||||
|
|
||||||
|
|
||||||
|
def add_lang_to_module(paths, module_name, path):
    """File a path under a 'locale' directory as a language file.

    Looks for a .../locale/<lang>[_<COUNTRY>]/... component, records the
    path under paths["lang"][module_name][<lang>] and returns True;
    returns False when no language code could be detected.
    """
    lang_country_code = None
    for i, parent in enumerate(path.parents):
        # i > 0 guarantees parents[i-1] (the locale subdirectory) exists
        if i > 0 and parent.name == 'locale':
            lang_country_code = path.parents[i - 1].name
            break
    if lang_country_code is None:
        return False
    # convert potential en_US to plain en
    lang_code = lang_country_code.partition('_')[0]
    per_module = paths["lang"].setdefault(module_name, defaultdict(list))
    per_module[lang_code].append(path)
    return True
||||||
|
|
||||||
|
|
||||||
|
def prepend_mandirs(prefix):
    """Return the MANDIRS glob patterns with `prefix` prepended to each."""
    prefix_str = str(prefix)
    return [prefix_str + mandir for mandir in MANDIRS]
||||||
|
|
||||||
|
|
||||||
|
def normalize_manpage_filename(prefix, path):
    """
    If a path is processed by RPM's brp-compress script, strip it of the extension
    (if the extension matches one of the listed by brp-compress),
    append '*' to the filename and return it. If not, return the unchanged path.
    Rationale: https://docs.fedoraproject.org/en-US/packaging-guidelines/#_manpages

    Examples:

    >>> normalize_manpage_filename(PosixPath('/usr'), BuildrootPath('/usr/share/man/de/man1/linkchecker.1'))
    BuildrootPath('/usr/share/man/de/man1/linkchecker.1*')

    >>> normalize_manpage_filename(PosixPath('/usr'), BuildrootPath('/usr/share/doc/en/man/man1/getmac.1'))
    BuildrootPath('/usr/share/doc/en/man/man1/getmac.1*')

    >>> normalize_manpage_filename(PosixPath('/usr'), BuildrootPath('/usr/share/man/man8/abc.8.zstd'))
    BuildrootPath('/usr/share/man/man8/abc.8*')

    >>> normalize_manpage_filename(PosixPath('/usr'), BuildrootPath('/usr/kerberos/man/dir'))
    BuildrootPath('/usr/kerberos/man/dir')

    >>> normalize_manpage_filename(PosixPath('/usr'), BuildrootPath('/usr/kerberos/man/dir.1'))
    BuildrootPath('/usr/kerberos/man/dir.1*')

    >>> normalize_manpage_filename(PosixPath('/usr'), BuildrootPath('/usr/bin/getmac'))
    BuildrootPath('/usr/bin/getmac')
    """

    prefixed_mandirs = prepend_mandirs(prefix)
    # for-else: the else branch runs only when no mandir pattern matched
    for mandir in prefixed_mandirs:
        # "dir" is explicitly excluded by RPM
        # https://github.com/rpm-software-management/rpm/blob/rpm-4.17.0-release/scripts/brp-compress#L24
        if fnmatch.fnmatch(str(path.parent), mandir) and path.name != "dir":
            # "abc.1.gz" -> "abc.1*" (compression suffix replaced by glob)
            if path.suffix[1:] in MANPAGE_EXTENSIONS:
                return BuildrootPath(path.parent / (path.stem + "*"))
            # "abc.1" -> "abc.1*" (glob covers the future compression suffix)
            else:
                return BuildrootPath(path.parent / (path.name + "*"))
    else:
        return path
||||||
|
|
||||||
|
|
||||||
|
def is_valid_module_name(s):
    """Return True if `s` is usable as a public Python module name.

    The string must be a valid Python identifier, must not be a keyword,
    and must not start with an underscore (treated as private).

    Examples:

    >>> is_valid_module_name('module_name')
    True

    >>> is_valid_module_name('12module_name')
    False

    >>> is_valid_module_name('module-name')
    False

    >>> is_valid_module_name('return')
    False

    >>> is_valid_module_name('_module_name')
    False
    """
    return s.isidentifier() and not iskeyword(s) and not s.startswith("_")
||||||
|
|
||||||
|
|
||||||
|
def module_names_from_path(path):
    """Derive every importable (dotted) module name implied by *path*.

    A ``.py`` or ``.so`` file makes itself and each of its parent
    directories importable (``foo/bar/baz.py`` -> {"foo", "foo.bar",
    "foo.bar.baz"}).  Paths containing a component that is not a valid
    public module name are discarded, as are non-module files.

    Returns a set of dotted module names (possibly empty).
    """
    # Anything that is not a Python source or extension module is ignored
    if path.suffix not in (".py", ".so"):
        return set()

    segments = list(path.parts)

    # Trim the suffix from the final component; extension modules may
    # carry two suffixes (e.g. "mod.cpython-39-x86_64-linux-gnu.so"),
    # so for .so files the stem is stripped twice.
    if path.suffix == ".py":
        segments[-1] = path.stem
    else:
        segments[-1] = PosixPath(path.stem).stem

    # '__init__' indicates a module but we don't want to import the actual file.
    # It's unclear whether there can be __init__.so files in Python packages.
    # The idea was raised in 2008 on the Python-ideas mailing list
    # (https://mail.python.org/pipermail/python-ideas/2008-October/002292.html)
    # and there are a few reports of people compiling __init__.py to __init__.so.
    # However it's not officially documented nor forbidden, so we check the
    # stem after stripping the suffix from the file.
    if segments and segments[-1] == "__init__":
        segments.pop()

    # One invalid component poisons the whole path
    if any(not is_valid_module_name(segment) for segment in segments):
        return set()
    return {".".join(segments[:n + 1]) for n in range(len(segments))}
||||||
|
|
||||||
|
|
||||||
|
def classify_paths(
    record_path, parsed_record_content, metadata, sitedirs, python_version, prefix
):
    """
    For each BuildrootPath in parsed_record_content classify it to a dict structure
    that allows to filter the files for the %files and %check section easier.

    For the dict structure, look at the beginning of this function's code.

    Each "module" is a dict with "type" ("package", "script", "extension"), and "files" and "dirs".

    record_path: BuildrootPath of the dist-info RECORD file.
    parsed_record_content: iterable of BuildrootPaths listed in that RECORD.
    metadata: email.message.EmailMessage with the dist's METADATA content.
    sitedirs: BuildrootPaths of site-packages dirs (sitelib/sitearch).
    python_version: X.Y string used to locate bytecode files.
    prefix: installation prefix used to recognize manpage locations.
    """
    distinfo = record_path.parent
    paths = {
        "metadata": {
            "files": [],  # regular %file entries with dist-info content
            "dirs": [distinfo],  # %dir %file entries with dist-info directory
            "docs": [],  # to be used once there is upstream way to recognize READMEs
            "licenses": [],  # %license entries parsed from dist-info METADATA file
        },
        "lang": {},  # %lang entries: [module_name or None][language_code] lists of .mo files
        "modules": defaultdict(list),  # each importable module (directory, .py, .so)
        "module_names": set(),  # qualified names of each importable module ("foo.bar.baz")
        "other": {"files": []},  # regular %file entries we could not parse :(
    }

    # In RECORDs generated by pip, there are no directories, only files.
    # The example RECORD from PEP 376 does not contain directories either.
    # Hence, we'll only assume files, but TODO get it officially documented.
    license_files = metadata.get_all('License-File')
    for path in parsed_record_content:
        if path.suffix == ".pyc":
            # we handle bytecode separately
            continue

        # 1) Files inside the *.dist-info directory -> "metadata"
        if distinfo in path.parents:
            if path.parent == distinfo and path.name in ("RECORD", "REQUESTED"):
                # RECORD and REQUESTED files are removed in %pyproject_install
                # See PEP 627
                continue
            if license_files and str(path.relative_to(distinfo)) in license_files:
                paths["metadata"]["licenses"].append(path)
            else:
                paths["metadata"]["files"].append(path)
            # nested directories within distinfo
            index = path.parents.index(distinfo)
            for parent in list(path.parents)[:index]:  # no direct slice until Python 3.10
                if parent not in paths["metadata"]["dirs"]:
                    paths["metadata"]["dirs"].append(parent)
            continue

        # 2) Files under a site-packages directory -> "modules"/"lang"/"other"
        for sitedir in sitedirs:
            if sitedir in path.parents:
                # Get only the part without sitedir prefix to classify module names
                relative_path = path.relative_to(sitedir)
                paths["module_names"].update(module_names_from_path(relative_path))
                if path.parent == sitedir:
                    # Top-level file directly in site-packages
                    if path.suffix == ".so":
                        # extension modules can have 2 suffixes
                        name = BuildrootPath(path.stem).stem
                        add_file_to_module(paths, name, "extension", "files", path)
                    elif path.suffix == ".py":
                        name = path.stem
                        # we add the .pyc files, but not top-level __pycache__
                        add_py_file_to_module(
                            paths, name, "script", path, python_version,
                            include_pycache_dir=False
                        )
                    else:
                        paths["other"]["files"].append(path)
                else:
                    # this file is inside a dir, we add all dirs upwards until sitedir
                    index = path.parents.index(sitedir)
                    module_dir = path.parents[index - 1]
                    for parent in list(path.parents)[:index]:  # no direct slice until Python 3.10
                        add_file_to_module(paths, module_dir.name, "package", "dirs", parent)
                    is_lang = False
                    if path.suffix == ".mo":
                        is_lang = add_lang_to_module(paths, module_dir.name, path)
                    if not is_lang:
                        if path.suffix == ".py":
                            # we add the .pyc files, and their __pycache__
                            add_py_file_to_module(
                                paths, module_dir.name, "package", path, python_version,
                                include_pycache_dir=True
                            )
                        else:
                            add_file_to_module(paths, module_dir.name, "package", "files", path)
                break
        else:
            # 3) Everything else (outside dist-info and site-packages) -> "other"
            if path.suffix == ".mo":
                # .mo with an unrecognized language falls back to a plain entry
                add_lang_to_module(paths, None, path) or paths["other"]["files"].append(path)
            else:
                # manpages get a glob suffix so compressed variants still match
                path = normalize_manpage_filename(prefix, path)
                paths["other"]["files"].append(path)

    return paths
||||||
|
|
||||||
|
|
||||||
|
def escape_rpm_path(path):
    """Escape a string-path or BuildrootPath for use in the %files section.

    A space in a path otherwise makes RPM think it's multiple paths, so
    such entries are wrapped in double quotes; a literal % could be
    expanded as a macro, so it is escaped by repetition.

    Due to limitations in RPM, paths that combine the %files delimiters
    with double quotes or square brackets cannot be expressed at all and
    raise NotImplementedError.

    Examples:

    >>> escape_rpm_path(BuildrootPath('/usr/lib/python3.9/site-packages/setuptools'))
    '/usr/lib/python3.9/site-packages/setuptools'

    >>> escape_rpm_path('/usr/lib/python3.9/site-packages/setuptools/script (dev).tmpl')
    '"/usr/lib/python3.9/site-packages/setuptools/script (dev).tmpl"'

    >>> escape_rpm_path('/usr/share/data/100%valid.path')
    '/usr/share/data/100%%%%%%%%valid.path'

    >>> escape_rpm_path('/usr/share/data/100 % valid.path')
    '"/usr/share/data/100 %%%%%%%% valid.path"'
    """
    original = escaped = str(path)
    if "%" in escaped:
        # Escaping by 8 %s has been verified in RPM 4.16 and 4.17, but probably not stable
        # See this thread http://lists.rpm.org/pipermail/rpm-list/2021-June/002048.html
        # On the CI, we build tests/escape_percentages.spec to verify this assumption
        escaped = escaped.replace("%", "%%%%%%%%")
    if not any(delimiter in escaped for delimiter in RPM_FILES_DELIMETERS):
        return escaped
    if '"' in escaped:
        # As far as we know, RPM cannot list such file individually
        # See this thread http://lists.rpm.org/pipermail/rpm-list/2021-June/002048.html
        raise NotImplementedError(f'" symbol in path with spaces is not supported by %pyproject_save_files: {original!r}')
    if "[" in escaped or "]" in escaped:
        # See https://bugzilla.redhat.com/show_bug.cgi?id=1990879
        # and https://github.com/rpm-software-management/rpm/issues/1749
        raise NotImplementedError(f'[ or ] symbol in path with spaces is not supported by %pyproject_save_files: {original!r}')
    return f'"{escaped}"'
||||||
|
|
||||||
|
|
||||||
|
def generate_file_list(paths_dict, module_globs, include_others=False):
    """
    This function takes the classified paths_dict and turns it into lines
    for the %files section. Returns list with text lines, no Path objects.

    Only includes files from modules that match module_globs, metadata and
    optionaly all other files.

    It asserts that all globs match at least one module, raises ValueError otherwise.
    Multiple globs matching identical module(s) are OK.

    paths_dict: structure produced by classify_paths().
    module_globs: set of fnmatch-style globs from %pyproject_save_files.
    include_others: also emit the unclassified "other" files (+auto flag).
    """
    files = set()

    if include_others:
        files.update(f"{escape_rpm_path(p)}" for p in paths_dict["other"]["files"])
        # .mo files not attributable to any module are keyed under None
        try:
            for lang_code in paths_dict["lang"][None]:
                files.update(f"%lang({lang_code}) {escape_rpm_path(p)}" for p in paths_dict["lang"][None][lang_code])
        except KeyError:
            pass

    # dist-info metadata is always packaged
    files.update(f"{escape_rpm_path(p)}" for p in paths_dict["metadata"]["files"])
    for macro in "dir", "doc", "license":
        files.update(f"%{macro} {escape_rpm_path(p)}" for p in paths_dict["metadata"][f"{macro}s"])

    modules = paths_dict["modules"]
    done_modules = set()
    done_globs = set()

    for glob in module_globs:
        for name in modules:
            if fnmatch.fnmatchcase(name, glob):
                # a module may be matched by several globs; emit its files once
                if name not in done_modules:
                    try:
                        for lang_code in paths_dict["lang"][name]:
                            files.update(f"%lang({lang_code}) {escape_rpm_path(p)}" for p in paths_dict["lang"][name][lang_code])
                    except KeyError:
                        pass
                    for module in modules[name]:
                        files.update(f"%dir {escape_rpm_path(p)}" for p in module["dirs"])
                        files.update(f"{escape_rpm_path(p)}" for p in module["files"])
                    done_modules.add(name)
                done_globs.add(glob)

    # every glob must have matched at least one module
    missed = module_globs - done_globs
    if missed:
        missed_text = ", ".join(sorted(missed))
        raise ValueError(f"Globs did not match any module: {missed_text}")

    return sorted(files)
||||||
|
|
||||||
|
|
||||||
|
def parse_varargs(varargs):
    """
    Parse varargs from the %pyproject_save_files macro.

    Arguments starting with + are treated as flags, everything else is a glob.

    Returns a set of globs and a boolean flag saying whether to include all
    the other (unclassified) files.

    Raises ValueError for unknown flags and for globs containing dots or
    slashes (namespace packages are not supported yet).

    Examples:

    >>> parse_varargs(['*'])
    ({'*'}, False)

    >>> parse_varargs(['+auto'])
    (set(), True)

    >>> parse_varargs(['+kinkdir'])
    Traceback (most recent call last):
    ...
    ValueError: Invalid argument: +kinkdir
    """
    namespace_error_template = (
        "Attempted to use a namespaced package with {symbol} in the glob: {arg}. "
        "That is not (yet) supported. Use {top} instead and see "
        "https://bugzilla.redhat.com/1935266 for details."
    )
    globs = set()
    include_auto = False
    for arg in varargs:
        if arg.startswith("+"):
            # flags: only +auto is recognized
            if arg != "+auto":
                raise ValueError(f"Invalid argument: {arg}")
            include_auto = True
            continue
        # dotted or slashed globs would address namespace packages
        for symbol in (".", "/"):
            if symbol in arg:
                top = arg.partition(symbol)[0]
                raise ValueError(namespace_error_template.format(symbol=symbol, arg=arg, top=top))
        globs.add(arg)

    return globs, include_auto
||||||
|
|
||||||
|
|
||||||
|
def load_parsed_record(pyproject_record):
    """Load the JSON pyproject_record file written by %pyproject_install.

    Returns a dict mapping a BuildrootPath of each RECORD file to the
    list of BuildrootPaths recorded in it.

    Raises FileExistsError when more than one RECORD was captured, as
    only a single wheel per build is supported.
    """
    with open(pyproject_record) as record_file:
        content = json.load(record_file)

    if len(content) > 1:
        raise FileExistsError("%pyproject install has found more than one *.dist-info/RECORD file. "
                              "Currently, %pyproject_save_files supports only one wheel → one file list mapping. "
                              "Feel free to open a bugzilla for pyproject-rpm-macros and describe your usecase.")

    # Redefine strings stored in JSON to BuildrootPaths
    return {
        BuildrootPath(record_path): [BuildrootPath(entry) for entry in files]
        for record_path, files in content.items()
    }
||||||
|
|
||||||
|
|
||||||
|
def dist_metadata(buildroot, record_path):
    """
    Returns distribution metadata (email.message.EmailMessage), possibly empty

    buildroot: real filesystem root the BuildrootPaths are relative to.
    record_path: BuildrootPath of the dist-info RECORD file.
    """
    # Map the buildroot-relative dist-info directory to its real location
    # (to_real is a BuildrootPath method defined elsewhere in this project)
    real_dist_path = record_path.parent.to_real(buildroot)
    # Distribution.at() reads the metadata from that directory;
    # NOTE(review): presumably importlib.metadata's Distribution — confirm import
    dist = Distribution.at(real_dist_path)
    return dist.metadata
||||||
|
|
||||||
|
|
||||||
|
def pyproject_save_files_and_modules(buildroot, sitelib, sitearch, python_version, pyproject_record, prefix, varargs):
    """
    Takes arguments from the %{pyproject_save_files} macro

    Returns tuple: list of paths for the %files section and list of module names
    for the %check section
    """
    # On 32 bit architectures, sitelib equals to sitearch
    # This saves us browsing one directory twice
    sitedirs = sorted({sitelib, sitearch})

    # globs select which modules to package; include_auto == the +auto flag
    globs, include_auto = parse_varargs(varargs)
    parsed_records = load_parsed_record(pyproject_record)

    final_file_list = []
    all_module_names = set()

    # load_parsed_record() currently guarantees at most one record,
    # but the loop keeps the code ready for multiple wheels
    for record_path, files in parsed_records.items():
        metadata = dist_metadata(buildroot, record_path)
        paths_dict = classify_paths(
            record_path, files, metadata, sitedirs, python_version, prefix
        )

        final_file_list.extend(
            generate_file_list(paths_dict, globs, include_auto)
        )
        all_module_names.update(paths_dict["module_names"])

    # Sort values, so they are always checked in the same order
    all_module_names = sorted(all_module_names)

    return final_file_list, all_module_names
||||||
|
|
||||||
|
|
||||||
|
def main(cli_args):
    """CLI entry point: compute the %files lines and the importable module
    names, then write each list (newline-terminated) to its output file."""
    file_lines, module_names = pyproject_save_files_and_modules(
        cli_args.buildroot,
        cli_args.sitelib,
        cli_args.sitearch,
        cli_args.python_version,
        cli_args.pyproject_record,
        cli_args.prefix,
        cli_args.varargs,
    )

    # Both outputs end with a trailing newline so shell tools read them cleanly
    for destination, lines in (
        (cli_args.output_files, file_lines),
        (cli_args.output_modules, module_names),
    ):
        destination.write_text("\n".join(lines) + "\n", encoding="utf-8")
||||||
|
|
||||||
|
|
||||||
|
def argparser():
    """Build the argument parser for the %pyproject_save_files helper.

    All flag arguments are required; positional varargs carry the module
    globs and +auto flag.
    """
    parser = argparse.ArgumentParser()
    required = parser.add_argument_group("required arguments")
    # (flag, converter) pairs — paths inside the buildroot use BuildrootPath
    for flag, converter in (
        ("--output-files", PosixPath),
        ("--output-modules", PosixPath),
        ("--buildroot", PosixPath),
        ("--sitelib", BuildrootPath),
        ("--sitearch", BuildrootPath),
        ("--python-version", str),
        ("--pyproject-record", PosixPath),
        ("--prefix", PosixPath),
    ):
        required.add_argument(flag, type=converter, required=True)
    parser.add_argument("varargs", nargs="+")
    return parser
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Invoked by the %pyproject_save_files macro; all inputs come from
    # the command line (see argparser() above).
    cli_args = argparser().parse_args()
    main(cli_args)
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,25 @@ |
|||||||
|
import sys |
||||||
|
import subprocess |
||||||
|
|
||||||
|
|
||||||
|
def build_wheel(*, wheeldir, stdout=None):
    """Build a wheel for the project in the current directory with pip.

    wheeldir: directory where the built wheel is placed.
    stdout: optional file object passed through to subprocess.run.

    Returns pip's exit code (0 on success).
    """
    # --no-build-isolation / --no-deps: the RPM build provides all
    # dependencies; pip must not download or install anything.
    pip_command = [
        sys.executable,
        '-m', 'pip',
        'wheel',
        '--wheel-dir', wheeldir,
        '--no-deps',
        '--use-pep517',
        '--no-build-isolation',
        '--disable-pip-version-check',
        '--no-clean',
        '--progress-bar', 'off',
        '--verbose',
        '.',
    ]
    completed = subprocess.run(pip_command, stdout=stdout)
    return completed.returncode
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # sys.argv[1] is the directory to place the built wheel in;
    # the process exits with pip's return code.
    sys.exit(build_wheel(wheeldir=sys.argv[1]))
@ -0,0 +1,27 @@ |
|||||||
|
# Dependency generator hooks: emit "python(abi) = MAJOR.MINOR"
# Provides/Requires from interpreter and library paths in the buildroot.

%__python_provides() %{lua:
    -- Match buildroot/payload paths of the form
    --    /PATH/OF/BUILDROOT/usr/bin/pythonMAJOR.MINOR
    -- generating a line of the form
    --    python(abi) = MAJOR.MINOR
    -- (Don't match against -config tools e.g. /usr/bin/python2.6-config)
    local path = rpm.expand('%1')
    if path:match('/usr/bin/python%d+%.%d+$') then
        -- capture MAJOR.MINOR from the interpreter file name
        local provides = path:gsub('.*/usr/bin/python(%d+%.%d+)', 'python(abi) = %1')
        print(provides)
    end
}

%__python_requires() %{lua:
    -- Match buildroot paths of the form
    --    /PATH/OF/BUILDROOT/usr/lib/pythonMAJOR.MINOR/ and
    --    /PATH/OF/BUILDROOT/usr/lib64/pythonMAJOR.MINOR/
    -- generating a line of the form:
    --    python(abi) = MAJOR.MINOR
    local path = rpm.expand('%1')
    if path:match('/usr/lib%d*/python%d+%.%d+/.*') then
        -- capture MAJOR.MINOR from the library directory name
        local requires = path:gsub('.*/usr/lib%d*/python(%d+%.%d+)/.*', 'python(abi) = %1')
        print(requires)
    end
}

# Restrict the generators above to .py/.pyc/.pyo/.so files under
# versioned python lib dirs, and to the versioned interpreter binaries.
%__python_path ^((%{_prefix}/lib(64)?/python[[:digit:]]+\\.[[:digit:]]+/.*\\.(py[oc]?|so))|(%{_bindir}/python[[:digit:]]+\\.[[:digit:]]+))$
@ -0,0 +1,91 @@ |
|||||||
|
#!/usr/bin/python3 -sB |
||||||
|
# (imports pythondistdeps from /usr/lib/rpm, hence -B) |
||||||
|
# |
||||||
|
# This program is free software. |
||||||
|
# |
||||||
|
# It is placed in the public domain or under the CC0-1.0-Universal license, |
||||||
|
# whichever is more permissive. |
||||||
|
# |
||||||
|
# Alternatively, it may be redistributed and/or modified under the terms of |
||||||
|
# the LGPL version 2.1 (or later) or GPL version 2 (or later). |
||||||
|
# |
||||||
|
# Use this script to generate bundled provides, e.g.: |
||||||
|
# ./pythonbundles.py setuptools-47.1.1/pkg_resources/_vendor/vendored.txt |
||||||
|
|
||||||
|
import pathlib |
||||||
|
import sys |
||||||
|
|
||||||
|
# inject parse_version import to pythondistdeps |
||||||
|
# not the nicest API, but :/ |
||||||
|
from pkg_resources import parse_version |
||||||
|
import pythondistdeps |
||||||
|
pythondistdeps.parse_version = parse_version |
||||||
|
|
||||||
|
|
||||||
|
def generate_bundled_provides(paths, namespace):
    """Produce RPM 'Provides: bundled(...)' lines from vendored.txt files.

    paths: iterable of pathlib.Path objects pointing to requirements-style
        files (one "name==version" or VCS URL per line).
    namespace: provide namespace, e.g. "python3dist" ->
        "bundled(python3dist(<name>))".

    Returns a set of 'Provides: ...' strings.
    """
    provides = set()

    for path in paths:
        for line in path.read_text().splitlines():
            line, _, comment = line.partition('#')
            if comment.startswith('egg='):
                # not a real comment
                # e.g. git+https://github.com/monty/spam.git@master#egg=spam&...
                # keep only the egg name: cut at the first space, then at '&'
                egg, *_ = comment.strip().partition(' ')
                egg, *_ = egg.strip().partition('&')
                # egg[4:] drops the "egg=" prefix; no version is available
                name = pythondistdeps.normalize_name(egg[4:])
                provides.add(f'Provides: bundled({namespace}({name}))')
                continue
            line = line.strip()
            if line:
                # pinned requirement: "name==version"
                name, _, version = line.partition('==')
                name = pythondistdeps.normalize_name(name)
                bundled_name = f"bundled({namespace}({name}))"
                # convert() renders the versioned RPM provide string
                python_provide = pythondistdeps.convert(bundled_name, '==', version)
                provides.add(f'Provides: {python_provide}')

    return provides
||||||
|
|
||||||
|
|
||||||
|
def compare(expected, given):
    """Check that *given* lines match the *expected* set of provides.

    Comment lines (starting with '#'), surrounding whitespace and blank
    lines in *given* are ignored.  On mismatch, the missing and the
    unexpected entries are reported to stderr.

    Returns True when the sets match exactly, False otherwise.
    """
    cleaned = {line.strip() for line in given}
    cleaned = {line for line in cleaned if line and not line.startswith('#')}
    if expected == cleaned:
        return True
    missing = expected - cleaned
    unexpected = cleaned - expected
    if missing:
        print('Missing expected provides:', file=sys.stderr)
        for provide in sorted(missing):
            print(f' - {provide}', file=sys.stderr)
    if unexpected:
        print('Redundant unexpected provides:', file=sys.stderr)
        for provide in sorted(unexpected):
            print(f' + {provide}', file=sys.stderr)
    return False
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(prog=sys.argv[0],
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('vendored', metavar='VENDORED.TXT', nargs='+', type=pathlib.Path,
                        help='Upstream information about vendored libraries')
    parser.add_argument('-c', '--compare-with', action='store',
                        help='A string value to compare with and verify')
    parser.add_argument('-n', '--namespace', action='store',
                        help='What namespace of provides will used', default='python3dist')
    args = parser.parse_args()

    provides = generate_bundled_provides(args.vendored, args.namespace)

    if args.compare_with:
        # verification mode: exit 1 when the given provides differ
        given = args.compare_with.splitlines()
        same = compare(provides, given)
        if not same:
            sys.exit(1)
    else:
        # generation mode: print the provides, sorted for stable output
        for provide in sorted(provides):
            print(provide)
@ -0,0 +1,3 @@ |
|||||||
|
# pythonXdist(...) dependency generators driven by pythondistdeps.py,
# fed by the installed dist metadata matched by %__pythondist_path below.
%__pythondist_provides %{_rpmconfigdir}/pythondistdeps.py --provides --normalized-names-format pep503 --package-name %{name} --majorver-provides-versions %{__default_python3_version}
%__pythondist_requires %{_rpmconfigdir}/pythondistdeps.py --requires --normalized-names-format pep503 --package-name %{name} %{?!_python_no_extras_requires:--require-extras-subpackages} --console-scripts-nodep-setuptools-since 3.10
# Attach the generators to site-packages metadata files/dirs only.
%__pythondist_path ^/usr/lib(64)?/python[3-9]\\.[[:digit:]]+/site-packages/[^/]+\\.(dist-info|egg-info|egg-link)$
@ -0,0 +1,608 @@ |
|||||||
|
#!/usr/bin/python3 -s |
||||||
|
# -*- coding: utf-8 -*- |
||||||
|
# |
||||||
|
# Copyright 2010 Per Øyvind Karlsen <proyvind@moondrake.org> |
||||||
|
# Copyright 2015 Neal Gompa <ngompa13@gmail.com> |
||||||
|
# Copyright 2020 SUSE LLC |
||||||
|
# |
||||||
|
# This program is free software. It may be redistributed and/or modified under |
||||||
|
# the terms of the LGPL version 2.1 (or later). |
||||||
|
# |
||||||
|
# RPM python dependency generator, using .egg-info/.egg-link/.dist-info data |
||||||
|
# |
||||||
|
|
||||||
|
from __future__ import print_function |
||||||
|
import argparse |
||||||
|
from os.path import dirname, sep |
||||||
|
import re |
||||||
|
from sys import argv, stdin, stderr, version_info |
||||||
|
from sysconfig import get_path |
||||||
|
from warnings import warn |
||||||
|
|
||||||
|
from packaging.requirements import Requirement as Requirement_ |
||||||
|
from packaging.version import parse |
||||||
|
import packaging.markers |
||||||
|
|
||||||
|
# Monkey patching packaging.markers to handle extras names in a |
||||||
|
# case-insensitive manner: |
||||||
|
# pip considers dnspython[DNSSEC] and dnspython[dnssec] to be equal, but |
||||||
|
# packaging markers treat extras in a case-sensitive manner. To solve this |
||||||
|
# issue, we introduce a comparison operator that compares case-insensitively |
||||||
|
# if both sides of the comparison are strings. And then we inject this |
||||||
|
# operator into packaging.markers to be used when comparing names of extras. |
||||||
|
# Fedora BZ: https://bugzilla.redhat.com/show_bug.cgi?id=1936875 |
||||||
|
# Upstream issue: https://discuss.python.org/t/what-extras-names-are-treated-as-equal-and-why/7614 |
||||||
|
# - After it's established upstream what is the canonical form of an extras |
||||||
|
# name, we plan to open an issue with packaging to hopefully solve this |
||||||
|
# there without having to resort to monkeypatching. |
||||||
|
def str_lower_eq(a, b):
    """Equality that compares two strings case-insensitively.

    Any non-string operand (or a mixed pair) falls back to plain ==.
    Injected into packaging.markers as the "==" operator so extras
    names compare case-insensitively (see the explanation above).
    """
    both_strings = isinstance(a, str) and isinstance(b, str)
    if both_strings:
        return a.lower() == b.lower()
    return a == b
||||||
|
# Inject the case-insensitive comparator as packaging's "==" marker operator.
# NOTE(review): _operators is a private packaging API — revisit on upgrades.
packaging.markers._operators["=="] = str_lower_eq
||||||
|
|
||||||
|
try: |
||||||
|
from importlib.metadata import PathDistribution |
||||||
|
except ImportError: |
||||||
|
from importlib_metadata import PathDistribution |
||||||
|
|
||||||
|
try: |
||||||
|
from pathlib import Path |
||||||
|
except ImportError: |
||||||
|
from pathlib2 import Path |
||||||
|
|
||||||
|
|
||||||
|
def normalize_name(name):
    """Normalize a project name per PEP 503:
    https://www.python.org/dev/peps/pep-0503/#normalized-names
    """
    # collapse runs of '-', '_' and '.' into a single dash; lowercase the rest
    return re.sub(r'[-_.]+', '-', name.lower())
||||||
|
|
||||||
|
|
||||||
|
def legacy_normalize_name(name):
    """Like pkg_resources Distribution.key property.

    Unlike PEP 503 normalization above, dots are preserved.
    """
    # collapse runs of '-' and '_' into a single dash; lowercase the rest
    return re.sub(r'[-_]+', '-', name.lower())
||||||
|
|
||||||
|
|
||||||
|
class Requirement(Requirement_):
    """packaging Requirement carrying pre-computed normalized names,
    so callers can match against distributions without re-normalizing."""
    def __init__(self, requirement_string):
        # Parse the PEP 508 requirement string via packaging, then cache
        # both normalization flavours of the requirement name.
        super(Requirement, self).__init__(requirement_string)
        # PEP 503 normalized form
        self.normalized_name = normalize_name(self.name)
        # pkg_resources-style ("legacy") normalized form
        self.legacy_normalized_name = legacy_normalize_name(self.name)
||||||
|
|
||||||
|
|
||||||
|
class Distribution(PathDistribution):
    """importlib.metadata PathDistribution augmented with normalized names,
    parsed requirements, declared extras and the owning Python version."""
    def __init__(self, path):
        """path: string path to a .dist-info/.egg-info directory."""
        super(Distribution, self).__init__(Path(path))

        # Check that the initialization went well and metadata are not missing or corrupted
        # name is the most important attribute, if it doesn't exist, import failed
        if not self.name or not isinstance(self.name, str):
            print("*** PYTHON_METADATA_FAILED_TO_PARSE_ERROR___SEE_STDERR ***")
            print('Error: Python metadata at `{}` are missing or corrupted.'.format(path), file=stderr)
            exit(65)  # os.EX_DATAERR

        self.normalized_name = normalize_name(self.name)
        self.legacy_normalized_name = legacy_normalize_name(self.name)
        # parsed Requires-Dist entries; self.requires may be None
        self.requirements = [Requirement(r) for r in self.requires or []]
        # extras names, lowercased for case-insensitive matching
        self.extras = [
            v.lower() for k, v in self.metadata.items() if k == 'Provides-Extra']
        self.py_version = self._parse_py_version(path)

    # `name` is defined as a property exactly like this in Python 3.10 in the
    # PathDistribution class. Due to that we can't redefine `name` as a normal
    # attribute. So we copied the Python 3.10 definition here into the code so
    # that it works also on previous Python/importlib_metadata versions.
    @property
    def name(self):
        """Return the 'Name' metadata for the distribution package."""
        return self.metadata['Name']

    def _parse_py_version(self, path):
        """Best-effort X.Y Python version extracted from *path*, or None."""
        # Try to parse the Python version from the path the metadata
        # resides at (e.g. /usr/lib/pythonX.Y/site-packages/...)
        res = re.search(r"/python(?P<pyver>\d+\.\d+)/", path)
        if res:
            return res.group('pyver')
        # If that hasn't worked, attempt to parse it from the metadata
        # directory name
        res = re.search(r"-py(?P<pyver>\d+.\d+)[.-]egg-info$", path)
        if res:
            return res.group('pyver')
        return None

    def requirements_for_extra(self, extra):
        """Return the requirements that apply only when *extra* is enabled."""
        extra_deps = []
        # we are only interested in dependencies with extra == 'our_extra' marker
        for req in self.requirements:
            # no marker at all, nothing to evaluate
            if not req.marker:
                continue
            # does the marker include extra == 'our_extra'?
            # we can only evaluate the marker as a whole,
            # so we evaluate it twice (using 2 different marker_envs)
            # and see if it only evaluates to True with our extra
            if (req.marker.evaluate(get_marker_env(self, extra)) and
                    not req.marker.evaluate(get_marker_env(self, None))):
                extra_deps.append(req)
        return extra_deps

    def __repr__(self):
        # e.g. "requests from /usr/lib/python3.9/site-packages/requests-2.25.1.dist-info"
        return '{} from {}'.format(self.name, self._path)
||||||
|
|
||||||
|
|
||||||
|
class RpmVersion():
    """Convert a PEP 440 version into an RPM-compatible version string.

    Pre-releases are rendered with '~', dev releases with '~~' and post
    releases with '^post' so RPM orders them the way PEP 440 intends.
    """
    def __init__(self, version_id):
        # parse() comes from packaging.version; legacy (non-PEP 440)
        # versions keep their raw string form in _version
        version = parse(version_id)
        if isinstance(version._version, str):
            self.version = version._version
        else:
            self.epoch = version._version.epoch
            self.version = list(version._version.release)
            self.pre = version._version.pre
            self.dev = version._version.dev
            self.post = version._version.post
            # version.local is ignored as it is not expected to appear
            # in public releases
            # https://www.python.org/dev/peps/pep-0440/#local-version-identifiers

    def is_legacy(self):
        """True when the parsed version was a non-PEP 440 legacy string."""
        return isinstance(self.version, str)

    def increment(self):
        """Bump the last release component and drop pre/dev/post markers.

        Mutates self and returns it (used to build upper bounds);
        must not be called on a legacy version.
        """
        self.version[-1] += 1
        self.pre = None
        self.dev = None
        self.post = None
        return self

    def __str__(self):
        """Render the RPM version string.

        NOTE: trailing zero release components are stripped in place,
        so rendering mutates self.version.
        """
        if self.is_legacy():
            return self.version
        if self.epoch:
            rpm_epoch = str(self.epoch) + ':'
        else:
            rpm_epoch = ''
        # 1.0.0 and 1.0 and 1 are equal in PEP 440; normalize to the shortest
        while len(self.version) > 1 and self.version[-1] == 0:
            self.version.pop()
        rpm_version = '.'.join(str(x) for x in self.version)
        if self.pre:
            # e.g. ('a', 1) -> '~a1' (sorts before the final release in RPM)
            rpm_suffix = '~{}'.format(''.join(str(x) for x in self.pre))
        elif self.dev:
            rpm_suffix = '~~{}'.format(''.join(str(x) for x in self.dev))
        elif self.post:
            # post is a ('post', N) tuple; only the number is used
            rpm_suffix = '^post{}'.format(self.post[1])
        else:
            rpm_suffix = ''
        return '{}{}{}'.format(rpm_epoch, rpm_version, rpm_suffix)
||||||
|
|
||||||
|
|
||||||
|
def convert_compatible(name, operator, version_id): |
||||||
|
if version_id.endswith('.*'): |
||||||
|
print("*** INVALID_REQUIREMENT_ERROR___SEE_STDERR ***") |
||||||
|
print('Invalid requirement: {} {} {}'.format(name, operator, version_id), file=stderr) |
||||||
|
exit(65) # os.EX_DATAERR |
||||||
|
version = RpmVersion(version_id) |
||||||
|
if version.is_legacy(): |
||||||
|
# LegacyVersions are not supported in this context |
||||||
|
print("*** INVALID_REQUIREMENT_ERROR___SEE_STDERR ***") |
||||||
|
print('Invalid requirement: {} {} {}'.format(name, operator, version_id), file=stderr) |
||||||
|
exit(65) # os.EX_DATAERR |
||||||
|
if len(version.version) == 1: |
||||||
|
print("*** INVALID_REQUIREMENT_ERROR___SEE_STDERR ***") |
||||||
|
print('Invalid requirement: {} {} {}'.format(name, operator, version_id), file=stderr) |
||||||
|
exit(65) # os.EX_DATAERR |
||||||
|
upper_version = RpmVersion(version_id) |
||||||
|
upper_version.version.pop() |
||||||
|
upper_version.increment() |
||||||
|
return '({} >= {} with {} < {})'.format( |
||||||
|
name, version, name, upper_version) |
||||||
|
|
||||||
|
|
||||||
|
def convert_equal(name, operator, version_id): |
||||||
|
if version_id.endswith('.*'): |
||||||
|
version_id = version_id[:-2] + '.0' |
||||||
|
return convert_compatible(name, '~=', version_id) |
||||||
|
version = RpmVersion(version_id) |
||||||
|
return '{} = {}'.format(name, version) |
||||||
|
|
||||||
|
|
||||||
|
def convert_arbitrary_equal(name, operator, version_id): |
||||||
|
if version_id.endswith('.*'): |
||||||
|
print("*** INVALID_REQUIREMENT_ERROR___SEE_STDERR ***") |
||||||
|
print('Invalid requirement: {} {} {}'.format(name, operator, version_id), file=stderr) |
||||||
|
exit(65) # os.EX_DATAERR |
||||||
|
version = RpmVersion(version_id) |
||||||
|
return '{} = {}'.format(name, version) |
||||||
|
|
||||||
|
|
||||||
|
def convert_not_equal(name, operator, version_id): |
||||||
|
if version_id.endswith('.*'): |
||||||
|
version_id = version_id[:-2] |
||||||
|
version = RpmVersion(version_id) |
||||||
|
if version.is_legacy(): |
||||||
|
# LegacyVersions are not supported in this context |
||||||
|
print("*** INVALID_REQUIREMENT_ERROR___SEE_STDERR ***") |
||||||
|
print('Invalid requirement: {} {} {}'.format(name, operator, version_id), file=stderr) |
||||||
|
exit(65) # os.EX_DATAERR |
||||||
|
version_gt = RpmVersion(version_id).increment() |
||||||
|
version_gt_operator = '>=' |
||||||
|
# Prevent dev and pre-releases from satisfying a < requirement |
||||||
|
version = '{}~~'.format(version) |
||||||
|
else: |
||||||
|
version = RpmVersion(version_id) |
||||||
|
version_gt = version |
||||||
|
version_gt_operator = '>' |
||||||
|
return '({} < {} or {} {} {})'.format( |
||||||
|
name, version, name, version_gt_operator, version_gt) |
||||||
|
|
||||||
|
|
||||||
|
def convert_ordered(name, operator, version_id): |
||||||
|
if version_id.endswith('.*'): |
||||||
|
# PEP 440 does not define semantics for prefix matching |
||||||
|
# with ordered comparisons |
||||||
|
# see: https://github.com/pypa/packaging/issues/320 |
||||||
|
# and: https://github.com/pypa/packaging/issues/321 |
||||||
|
# This style of specifier is officially "unsupported", |
||||||
|
# even though it is processed. Support may be removed |
||||||
|
# in version 21.0. |
||||||
|
version_id = version_id[:-2] |
||||||
|
version = RpmVersion(version_id) |
||||||
|
if operator == '>': |
||||||
|
# distutils will allow a prefix match with '>' |
||||||
|
operator = '>=' |
||||||
|
if operator == '<=': |
||||||
|
# distutils will not allow a prefix match with '<=' |
||||||
|
operator = '<' |
||||||
|
else: |
||||||
|
version = RpmVersion(version_id) |
||||||
|
# For backwards compatibility, fallback to previous behavior with LegacyVersions |
||||||
|
if not version.is_legacy(): |
||||||
|
# Prevent dev and pre-releases from satisfying a < requirement |
||||||
|
if operator == '<' and not version.pre and not version.dev and not version.post: |
||||||
|
version = '{}~~'.format(version) |
||||||
|
# Prevent post-releases from satisfying a > requirement |
||||||
|
if operator == '>' and not version.pre and not version.dev and not version.post: |
||||||
|
version = '{}.0'.format(version) |
||||||
|
return '{} {} {}'.format(name, operator, version) |
||||||
|
|
||||||
|
|
||||||
|
OPERATORS = {'~=': convert_compatible, |
||||||
|
'==': convert_equal, |
||||||
|
'===': convert_arbitrary_equal, |
||||||
|
'!=': convert_not_equal, |
||||||
|
'<=': convert_ordered, |
||||||
|
'<': convert_ordered, |
||||||
|
'>=': convert_ordered, |
||||||
|
'>': convert_ordered} |
||||||
|
|
||||||
|
|
||||||
|
def convert(name, operator, version_id): |
||||||
|
try: |
||||||
|
return OPERATORS[operator](name, operator, version_id) |
||||||
|
except Exception as exc: |
||||||
|
raise RuntimeError("Cannot process Python package version `{}` for name `{}`". |
||||||
|
format(version_id, name)) from exc |
||||||
|
|
||||||
|
|
||||||
|
def get_marker_env(dist, extra): |
||||||
|
# packaging uses a default environment using |
||||||
|
# platform.python_version to evaluate if a dependency is relevant |
||||||
|
# based on environment markers [1], |
||||||
|
# e.g. requirement `argparse;python_version<"2.7"` |
||||||
|
# |
||||||
|
# Since we're running this script on one Python version while |
||||||
|
# possibly evaluating packages for different versions, we |
||||||
|
# set up an environment with the version we want to evaluate. |
||||||
|
# |
||||||
|
# [1] https://www.python.org/dev/peps/pep-0508/#environment-markers |
||||||
|
return {"python_full_version": dist.py_version, |
||||||
|
"python_version": dist.py_version, |
||||||
|
"extra": extra} |
||||||
|
|
||||||
|
|
||||||
|
def main(): |
||||||
|
"""To allow this script to be importable (and its classes/functions |
||||||
|
reused), actions are defined in the main function and are performed only |
||||||
|
when run as a main script.""" |
||||||
|
parser = argparse.ArgumentParser(prog=argv[0]) |
||||||
|
group = parser.add_mutually_exclusive_group(required=True) |
||||||
|
group.add_argument('-P', '--provides', action='store_true', help='Print Provides') |
||||||
|
group.add_argument('-R', '--requires', action='store_true', help='Print Requires') |
||||||
|
group.add_argument('-r', '--recommends', action='store_true', help='Print Recommends') |
||||||
|
group.add_argument('-C', '--conflicts', action='store_true', help='Print Conflicts') |
||||||
|
group.add_argument('-E', '--extras', action='store_true', help='[Unused] Generate spec file snippets for extras subpackages') |
||||||
|
group_majorver = parser.add_mutually_exclusive_group() |
||||||
|
group_majorver.add_argument('-M', '--majorver-provides', action='store_true', help='Print extra Provides with Python major version only') |
||||||
|
group_majorver.add_argument('--majorver-provides-versions', action='append', |
||||||
|
help='Print extra Provides with Python major version only for listed ' |
||||||
|
'Python VERSIONS (appended or comma separated without spaces, e.g. 2.7,3.9)') |
||||||
|
parser.add_argument('-m', '--majorver-only', action='store_true', help='Print Provides/Requires with Python major version only') |
||||||
|
parser.add_argument('-n', '--normalized-names-format', action='store', |
||||||
|
default="legacy-dots", choices=["pep503", "legacy-dots"], |
||||||
|
help='Format of normalized names according to pep503 or legacy format that allows dots [default]') |
||||||
|
parser.add_argument('--normalized-names-provide-both', action='store_true', |
||||||
|
help='Provide both `pep503` and `legacy-dots` format of normalized names (useful for a transition period)') |
||||||
|
parser.add_argument('-L', '--legacy-provides', action='store_true', help='Print extra legacy pythonegg Provides') |
||||||
|
parser.add_argument('-l', '--legacy', action='store_true', help='Print legacy pythonegg Provides/Requires instead') |
||||||
|
parser.add_argument('--console-scripts-nodep-setuptools-since', action='store', |
||||||
|
help='An optional Python version (X.Y), at least 3.8. ' |
||||||
|
'For that version and any newer version, ' |
||||||
|
'a dependency on "setuptools" WILL NOT be generated for packages with console_scripts/gui_scripts entry points. ' |
||||||
|
'By setting this flag, you guarantee that setuptools >= 47.2.0 is used ' |
||||||
|
'during the build of packages for this and any newer Python version.') |
||||||
|
parser.add_argument('--require-extras-subpackages', action='store_true', |
||||||
|
help="If there is a dependency on a package with extras functionality, require the extras subpackage") |
||||||
|
parser.add_argument('--package-name', action='store', help="Name of the RPM package that's being inspected. Required for extras requires/provides to work.") |
||||||
|
parser.add_argument('--namespace', action='store', help="Namespace for the printed Requires, Provides, Recommends and Conflicts") |
||||||
|
parser.add_argument('files', nargs=argparse.REMAINDER, help="Files from the RPM package that are to be inspected, can also be supplied on stdin") |
||||||
|
args = parser.parse_args() |
||||||
|
|
||||||
|
py_abi = args.requires |
||||||
|
py_deps = {} |
||||||
|
|
||||||
|
if args.majorver_provides_versions: |
||||||
|
# Go through the arguments (can be specified multiple times), |
||||||
|
# and parse individual versions (can be comma-separated) |
||||||
|
args.majorver_provides_versions = [v for vstring in args.majorver_provides_versions |
||||||
|
for v in vstring.split(",")] |
||||||
|
|
||||||
|
# If normalized_names_require_pep503 is True we require the pep503 |
||||||
|
# normalized name, if it is False we provide the legacy normalized name |
||||||
|
normalized_names_require_pep503 = args.normalized_names_format == "pep503" |
||||||
|
|
||||||
|
# If normalized_names_provide_pep503/legacy is True we provide the |
||||||
|
# pep503/legacy normalized name, if it is False we don't |
||||||
|
normalized_names_provide_pep503 = \ |
||||||
|
args.normalized_names_format == "pep503" or args.normalized_names_provide_both |
||||||
|
normalized_names_provide_legacy = \ |
||||||
|
args.normalized_names_format == "legacy-dots" or args.normalized_names_provide_both |
||||||
|
|
||||||
|
# At least one type of normalization must be provided |
||||||
|
assert normalized_names_provide_pep503 or normalized_names_provide_legacy |
||||||
|
|
||||||
|
if args.console_scripts_nodep_setuptools_since: |
||||||
|
nodep_setuptools_pyversion = parse(args.console_scripts_nodep_setuptools_since) |
||||||
|
if nodep_setuptools_pyversion < parse("3.8"): |
||||||
|
print("Only version 3.8+ is supported in --console-scripts-nodep-setuptools-since", file=stderr) |
||||||
|
print("*** PYTHON_EXTRAS_ARGUMENT_ERROR___SEE_STDERR ***") |
||||||
|
exit(65) # os.EX_DATAERR |
||||||
|
else: |
||||||
|
nodep_setuptools_pyversion = None |
||||||
|
|
||||||
|
# Is this script being run for an extras subpackage? |
||||||
|
extras_subpackage = None |
||||||
|
if args.package_name and '+' in args.package_name: |
||||||
|
# The extras names are encoded in the package names after the + sign. |
||||||
|
# We take the part after the rightmost +, ignoring when empty, |
||||||
|
# this allows packages like nicotine+ or c++ to work fine. |
||||||
|
# While packages with names like +spam or foo+bar would break, |
||||||
|
# names started with the plus sign are not very common |
||||||
|
# and pluses in the middle can be easily replaced with dashes. |
||||||
|
# Python extras names don't contain pluses according to PEP 508. |
||||||
|
package_name_parts = args.package_name.rpartition('+') |
||||||
|
extras_subpackage = package_name_parts[2].lower() or None |
||||||
|
|
||||||
|
namespace = (args.namespace + "({})") if args.namespace else "{}" |
||||||
|
|
||||||
|
for f in (args.files or stdin.readlines()): |
||||||
|
f = f.strip() |
||||||
|
lower = f.lower() |
||||||
|
name = 'python(abi)' |
||||||
|
# add dependency based on path, versioned if within versioned python directory |
||||||
|
if py_abi and (lower.endswith('.py') or lower.endswith('.pyc') or lower.endswith('.pyo')): |
||||||
|
if name not in py_deps: |
||||||
|
py_deps[name] = [] |
||||||
|
running_python_version = '{}.{}'.format(*version_info[:2]) |
||||||
|
purelib = get_path('purelib').split(running_python_version)[0] |
||||||
|
platlib = get_path('platlib').split(running_python_version)[0] |
||||||
|
for lib in (purelib, platlib): |
||||||
|
if lib in f: |
||||||
|
spec = ('==', f.split(lib)[1].split(sep)[0]) |
||||||
|
if spec not in py_deps[name]: |
||||||
|
py_deps[name].append(spec) |
||||||
|
|
||||||
|
# XXX: hack to workaround RPM internal dependency generator not passing directories |
||||||
|
lower_dir = dirname(lower) |
||||||
|
if lower_dir.endswith('.egg') or \ |
||||||
|
lower_dir.endswith('.egg-info') or \ |
||||||
|
lower_dir.endswith('.dist-info'): |
||||||
|
lower = lower_dir |
||||||
|
f = dirname(f) |
||||||
|
# Determine provide, requires, conflicts & recommends based on egg/dist metadata |
||||||
|
if lower.endswith('.egg') or \ |
||||||
|
lower.endswith('.egg-info') or \ |
||||||
|
lower.endswith('.dist-info'): |
||||||
|
dist = Distribution(f) |
||||||
|
if not dist.py_version: |
||||||
|
warn("Version for {!r} has not been found".format(dist), RuntimeWarning) |
||||||
|
continue |
||||||
|
|
||||||
|
# If processing an extras subpackage: |
||||||
|
# Check that the extras name is declared in the metadata, or |
||||||
|
# that there are some dependencies associated with the extras |
||||||
|
# name in the requires.txt (this is an outdated way to declare |
||||||
|
# extras packages). |
||||||
|
# - If there is an extras package declared only in requires.txt |
||||||
|
# without any dependencies, this check will fail. In that case |
||||||
|
# make sure to use updated metadata and declare the extras |
||||||
|
# package there. |
||||||
|
if extras_subpackage and extras_subpackage not in dist.extras and not dist.requirements_for_extra(extras_subpackage): |
||||||
|
print("*** PYTHON_EXTRAS_NOT_FOUND_ERROR___SEE_STDERR ***") |
||||||
|
print(f"\nError: The package name contains an extras name `{extras_subpackage}` that was not found in the metadata.\n" |
||||||
|
"Check if the extras were removed from the project. If so, consider removing the subpackage and obsoleting it from another.\n", file=stderr) |
||||||
|
exit(65) # os.EX_DATAERR |
||||||
|
|
||||||
|
if args.majorver_provides or args.majorver_provides_versions or \ |
||||||
|
args.majorver_only or args.legacy_provides or args.legacy: |
||||||
|
# Get the Python major version |
||||||
|
pyver_major = dist.py_version.split('.')[0] |
||||||
|
if args.provides: |
||||||
|
extras_suffix = f"[{extras_subpackage}]" if extras_subpackage else "" |
||||||
|
# If egg/dist metadata says package name is python, we provide python(abi) |
||||||
|
if dist.normalized_name == 'python': |
||||||
|
name = namespace.format('python(abi)') |
||||||
|
if name not in py_deps: |
||||||
|
py_deps[name] = [] |
||||||
|
py_deps[name].append(('==', dist.py_version)) |
||||||
|
if not args.legacy or not args.majorver_only: |
||||||
|
if normalized_names_provide_legacy: |
||||||
|
name = namespace.format('python{}dist({}{})').format(dist.py_version, dist.legacy_normalized_name, extras_suffix) |
||||||
|
if name not in py_deps: |
||||||
|
py_deps[name] = [] |
||||||
|
if normalized_names_provide_pep503: |
||||||
|
name_ = namespace.format('python{}dist({}{})').format(dist.py_version, dist.normalized_name, extras_suffix) |
||||||
|
if name_ not in py_deps: |
||||||
|
py_deps[name_] = [] |
||||||
|
if args.majorver_provides or args.majorver_only or \ |
||||||
|
(args.majorver_provides_versions and dist.py_version in args.majorver_provides_versions): |
||||||
|
if normalized_names_provide_legacy: |
||||||
|
pymajor_name = namespace.format('python{}dist({}{})').format(pyver_major, dist.legacy_normalized_name, extras_suffix) |
||||||
|
if pymajor_name not in py_deps: |
||||||
|
py_deps[pymajor_name] = [] |
||||||
|
if normalized_names_provide_pep503: |
||||||
|
pymajor_name_ = namespace.format('python{}dist({}{})').format(pyver_major, dist.normalized_name, extras_suffix) |
||||||
|
if pymajor_name_ not in py_deps: |
||||||
|
py_deps[pymajor_name_] = [] |
||||||
|
if args.legacy or args.legacy_provides: |
||||||
|
legacy_name = namespace.format('pythonegg({})({})').format(pyver_major, dist.legacy_normalized_name) |
||||||
|
if legacy_name not in py_deps: |
||||||
|
py_deps[legacy_name] = [] |
||||||
|
if dist.version: |
||||||
|
version = dist.version |
||||||
|
spec = ('==', version) |
||||||
|
|
||||||
|
if normalized_names_provide_legacy: |
||||||
|
if spec not in py_deps[name]: |
||||||
|
py_deps[name].append(spec) |
||||||
|
if args.majorver_provides or \ |
||||||
|
(args.majorver_provides_versions and dist.py_version in args.majorver_provides_versions): |
||||||
|
py_deps[pymajor_name].append(spec) |
||||||
|
if normalized_names_provide_pep503: |
||||||
|
if spec not in py_deps[name_]: |
||||||
|
py_deps[name_].append(spec) |
||||||
|
if args.majorver_provides or \ |
||||||
|
(args.majorver_provides_versions and dist.py_version in args.majorver_provides_versions): |
||||||
|
py_deps[pymajor_name_].append(spec) |
||||||
|
if args.legacy or args.legacy_provides: |
||||||
|
if spec not in py_deps[legacy_name]: |
||||||
|
py_deps[legacy_name].append(spec) |
||||||
|
if args.requires or (args.recommends and dist.extras): |
||||||
|
name = namespace.format('python(abi)') |
||||||
|
# If egg/dist metadata says package name is python, we don't add dependency on python(abi) |
||||||
|
if dist.normalized_name == 'python': |
||||||
|
py_abi = False |
||||||
|
if name in py_deps: |
||||||
|
py_deps.pop(name) |
||||||
|
elif py_abi and dist.py_version: |
||||||
|
if name not in py_deps: |
||||||
|
py_deps[name] = [] |
||||||
|
spec = ('==', dist.py_version) |
||||||
|
if spec not in py_deps[name]: |
||||||
|
py_deps[name].append(spec) |
||||||
|
|
||||||
|
if extras_subpackage: |
||||||
|
deps = [d for d in dist.requirements_for_extra(extras_subpackage)] |
||||||
|
else: |
||||||
|
deps = dist.requirements |
||||||
|
|
||||||
|
# console_scripts/gui_scripts entry points needed pkg_resources from setuptools |
||||||
|
# on new Python/setuptools versions, this is no longer required |
||||||
|
if nodep_setuptools_pyversion is None or parse(dist.py_version) < nodep_setuptools_pyversion: |
||||||
|
if (dist.entry_points and |
||||||
|
(lower.endswith('.egg') or |
||||||
|
lower.endswith('.egg-info'))): |
||||||
|
groups = {ep.group for ep in dist.entry_points} |
||||||
|
if {"console_scripts", "gui_scripts"} & groups: |
||||||
|
# stick them first so any more specific requirement |
||||||
|
# overrides it |
||||||
|
deps.insert(0, Requirement('setuptools')) |
||||||
|
# add requires/recommends based on egg/dist metadata |
||||||
|
for dep in deps: |
||||||
|
# Even if we're requiring `foo[bar]`, also require `foo` |
||||||
|
# to be safe, and to make it discoverable through |
||||||
|
# `repoquery --whatrequires` |
||||||
|
extras_suffixes = [""] |
||||||
|
if args.require_extras_subpackages and dep.extras: |
||||||
|
# A dependency can have more than one extras, |
||||||
|
# i.e. foo[bar,baz], so let's go through all of them |
||||||
|
extras_suffixes += [f"[{e.lower()}]" for e in dep.extras] |
||||||
|
|
||||||
|
for extras_suffix in extras_suffixes: |
||||||
|
if normalized_names_require_pep503: |
||||||
|
dep_normalized_name = dep.normalized_name |
||||||
|
else: |
||||||
|
dep_normalized_name = dep.legacy_normalized_name |
||||||
|
|
||||||
|
if args.legacy: |
||||||
|
name = namespace.format('pythonegg({})({})').format(pyver_major, dep.legacy_normalized_name) |
||||||
|
else: |
||||||
|
if args.majorver_only: |
||||||
|
name = namespace.format('python{}dist({}{})').format(pyver_major, dep_normalized_name, extras_suffix) |
||||||
|
else: |
||||||
|
name = namespace.format('python{}dist({}{})').format(dist.py_version, dep_normalized_name, extras_suffix) |
||||||
|
|
||||||
|
if dep.marker and not args.recommends and not extras_subpackage: |
||||||
|
if not dep.marker.evaluate(get_marker_env(dist, '')): |
||||||
|
continue |
||||||
|
|
||||||
|
if name not in py_deps: |
||||||
|
py_deps[name] = [] |
||||||
|
for spec in dep.specifier: |
||||||
|
if (spec.operator, spec.version) not in py_deps[name]: |
||||||
|
py_deps[name].append((spec.operator, spec.version)) |
||||||
|
|
||||||
|
# Unused, for automatic sub-package generation based on 'extras' from egg/dist metadata |
||||||
|
# TODO: implement in rpm later, or...? |
||||||
|
if args.extras: |
||||||
|
print(dist.extras) |
||||||
|
for extra in dist.extras: |
||||||
|
print('%%package\textras-{}'.format(extra)) |
||||||
|
print('Summary:\t{} extra for {} python package'.format(extra, dist.legacy_normalized_name)) |
||||||
|
print('Group:\t\tDevelopment/Python') |
||||||
|
for dep in dist.requirements_for_extra(extra): |
||||||
|
for spec in dep.specifier: |
||||||
|
if spec.operator == '!=': |
||||||
|
print('Conflicts:\t{} {} {}'.format(dep.legacy_normalized_name, '==', spec.version)) |
||||||
|
else: |
||||||
|
print('Requires:\t{} {} {}'.format(dep.legacy_normalized_name, spec.operator, spec.version)) |
||||||
|
print('%%description\t{}'.format(extra)) |
||||||
|
print('{} extra for {} python package'.format(extra, dist.legacy_normalized_name)) |
||||||
|
print('%%files\t\textras-{}\n'.format(extra)) |
||||||
|
if args.conflicts: |
||||||
|
# Should we really add conflicts for extras? |
||||||
|
# Creating a meta package per extra with recommends on, which has |
||||||
|
# the requires/conflicts in stead might be a better solution... |
||||||
|
for dep in dist.requirements: |
||||||
|
for spec in dep.specifier: |
||||||
|
if spec.operator == '!=': |
||||||
|
if dep.legacy_normalized_name not in py_deps: |
||||||
|
py_deps[dep.legacy_normalized_name] = [] |
||||||
|
spec = ('==', spec.version) |
||||||
|
if spec not in py_deps[dep.legacy_normalized_name]: |
||||||
|
py_deps[dep.legacy_normalized_name].append(spec) |
||||||
|
|
||||||
|
for name in sorted(py_deps): |
||||||
|
if py_deps[name]: |
||||||
|
# Print out versioned provides, requires, recommends, conflicts |
||||||
|
spec_list = [] |
||||||
|
for spec in py_deps[name]: |
||||||
|
spec_list.append(convert(name, spec[0], spec[1])) |
||||||
|
if len(spec_list) == 1: |
||||||
|
print(spec_list[0]) |
||||||
|
else: |
||||||
|
# Sort spec_list so that the results can be tested easily |
||||||
|
print('({})'.format(' with '.join(sorted(spec_list)))) |
||||||
|
else: |
||||||
|
# Print out unversioned provides, requires, recommends, conflicts |
||||||
|
print(name) |
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__": |
||||||
|
"""To allow this script to be importable (and its classes/functions |
||||||
|
reused), actions are performed only when run as a main script.""" |
||||||
|
try: |
||||||
|
main() |
||||||
|
except Exception as exc: |
||||||
|
print("*** PYTHONDISTDEPS_GENERATORS_FAILED ***", flush=True) |
||||||
|
raise RuntimeError("Error: pythondistdeps.py generator encountered an unhandled exception and was terminated.") from exc |
||||||
|
|
@ -0,0 +1,42 @@ |
|||||||
|
%__pythonname_provides() %{lua: |
||||||
|
local python = require 'fedora.srpm.python' |
||||||
|
-- this macro is called for each file in a package, the path being in %1 |
||||||
|
-- but we don't need to know the path, so we would get for each file: Macro %1 defined but not used within scope |
||||||
|
-- in here, we expand %name conditionally on %1 to suppress the warning |
||||||
|
local name = rpm.expand('%{?1:%{name}}') |
||||||
|
local evr = rpm.expand('%{?epoch:%{epoch}:}%{version}-%{release}') |
||||||
|
local provides = python.python_altprovides_once(name, evr) |
||||||
|
-- provides is either an array/table or nil |
||||||
|
-- nil means the function was already called with the same arguments: |
||||||
|
-- either with another file in %1 or manually via %py_provides |
||||||
|
if provides then |
||||||
|
for i, provide in ipairs(provides) do |
||||||
|
print(provide .. ' ') |
||||||
|
end |
||||||
|
end |
||||||
|
} |
||||||
|
|
||||||
|
%__pythonname_obsoletes() %{?rhel:%{lua: |
||||||
|
-- On CentOS/RHEL we automatically generate Obsoletes tags in the form: |
||||||
|
-- package python3-foo -> Obsoletes: python3.XY-foo |
||||||
|
-- This provides a clean upgrade path between major versions of CentOS/RHEL. |
||||||
|
-- In Fedora this is not needed as we don't ship ecosystem packages |
||||||
|
-- for alternative Python interpreters. |
||||||
|
local python = require 'fedora.srpm.python' |
||||||
|
-- this macro is called for each file in a package, the path being in %1 |
||||||
|
-- but we don't need to know the path, so we would get for each file: Macro %1 defined but not used within scope |
||||||
|
-- in here, we expand %name conditionally on %1 to suppress the warning |
||||||
|
local name = rpm.expand('%{?1:%{name}}') |
||||||
|
local evr = rpm.expand('%{?epoch:%{epoch}:}%{version}-%{release}') |
||||||
|
local obsoletes = python.python_altobsoletes_once(name, evr) |
||||||
|
-- obsoletes is either an array/table or nil |
||||||
|
-- nil means the function was already called with the same arguments: |
||||||
|
-- either with another file in %1 or manually via %py_provides |
||||||
|
if obsoletes then |
||||||
|
for i, obsolete in ipairs(obsoletes) do |
||||||
|
print(obsolete .. ' ') |
||||||
|
end |
||||||
|
end |
||||||
|
}} |
||||||
|
|
||||||
|
%__pythonname_path ^/ |
@ -0,0 +1,115 @@ |
|||||||
|
From 2c3f3a590ddfc151a456b44a5f96f0f603d178e9 Mon Sep 17 00:00:00 2001 |
||||||
|
From: Lumir Balhar <lbalhar@redhat.com> |
||||||
|
Date: Wed, 16 Feb 2022 08:36:21 +0100 |
||||||
|
Subject: [PATCH] Prevent removing of the system packages installed under |
||||||
|
/usr/lib when pip install --upgrade is executed. |
||||||
|
MIME-Version: 1.0 |
||||||
|
Content-Type: text/plain; charset=UTF-8 |
||||||
|
Content-Transfer-Encoding: 8bit |
||||||
|
|
||||||
|
Resolves: rhbz#1550368 |
||||||
|
|
||||||
|
Co-Authored-By: Michal Cyprian <m.cyprian@gmail.com> |
||||||
|
Co-Authored-By: Victor Stinner <vstinner@redhat.com> |
||||||
|
Co-Authored-By: Petr Viktorin <pviktori@redhat.com> |
||||||
|
Co-Authored-By: Lumir Balhar <lbalhar@redhat.com> |
||||||
|
Co-Authored-By: Miro Hrončok <miro@hroncok.cz> |
||||||
|
Co-Authored-By: Karolina Surma <ksurma@redhat.com> |
||||||
|
--- |
||||||
|
src/pip/_internal/metadata/base.py | 12 +++++++++++- |
||||||
|
src/pip/_internal/req/req_install.py | 2 +- |
||||||
|
src/pip/_internal/resolution/legacy/resolver.py | 4 +++- |
||||||
|
src/pip/_internal/resolution/resolvelib/factory.py | 12 ++++++++++++ |
||||||
|
4 files changed, 27 insertions(+), 3 deletions(-) |
||||||
|
|
||||||
|
diff --git a/src/pip/_internal/metadata/base.py b/src/pip/_internal/metadata/base.py |
||||||
|
index 151fd6d..f9109cd 100644 |
||||||
|
--- a/src/pip/_internal/metadata/base.py |
||||||
|
+++ b/src/pip/_internal/metadata/base.py |
||||||
|
@@ -28,7 +28,7 @@ from pip._vendor.packaging.utils import NormalizedName |
||||||
|
from pip._vendor.packaging.version import LegacyVersion, Version |
||||||
|
|
||||||
|
from pip._internal.exceptions import NoneMetadataError |
||||||
|
-from pip._internal.locations import site_packages, user_site |
||||||
|
+from pip._internal.locations import get_scheme, site_packages, user_site |
||||||
|
from pip._internal.models.direct_url import ( |
||||||
|
DIRECT_URL_METADATA_NAME, |
||||||
|
DirectUrl, |
||||||
|
@@ -560,6 +560,16 @@ class BaseDistribution(Protocol): |
||||||
|
for extra in self._iter_egg_info_extras(): |
||||||
|
metadata["Provides-Extra"] = extra |
||||||
|
|
||||||
|
+ @property |
||||||
|
+ def in_install_path(self) -> bool: |
||||||
|
+ """ |
||||||
|
+ Return True if given Distribution is installed in |
||||||
|
+ path matching distutils_scheme layout. |
||||||
|
+ """ |
||||||
|
+ norm_path = normalize_path(self.installed_location) |
||||||
|
+ return norm_path.startswith(normalize_path( |
||||||
|
+ get_scheme("").purelib.split('python')[0])) |
||||||
|
+ |
||||||
|
|
||||||
|
class BaseEnvironment: |
||||||
|
"""An environment containing distributions to introspect.""" |
||||||
|
diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py |
||||||
|
index a1e376c..ed7facf 100644 |
||||||
|
--- a/src/pip/_internal/req/req_install.py |
||||||
|
+++ b/src/pip/_internal/req/req_install.py |
||||||
|
@@ -416,7 +416,7 @@ class InstallRequirement: |
||||||
|
f"lack sys.path precedence to {existing_dist.raw_name} " |
||||||
|
f"in {existing_dist.location}" |
||||||
|
) |
||||||
|
- else: |
||||||
|
+ elif existing_dist.in_install_path: |
||||||
|
self.should_reinstall = True |
||||||
|
else: |
||||||
|
if self.editable: |
||||||
|
diff --git a/src/pip/_internal/resolution/legacy/resolver.py b/src/pip/_internal/resolution/legacy/resolver.py |
||||||
|
index fb49d41..040f2c1 100644 |
||||||
|
--- a/src/pip/_internal/resolution/legacy/resolver.py |
||||||
|
+++ b/src/pip/_internal/resolution/legacy/resolver.py |
||||||
|
@@ -325,7 +325,9 @@ class Resolver(BaseResolver): |
||||||
|
""" |
||||||
|
# Don't uninstall the conflict if doing a user install and the |
||||||
|
# conflict is not a user install. |
||||||
|
- if not self.use_user_site or req.satisfied_by.in_usersite: |
||||||
|
+ if ((not self.use_user_site |
||||||
|
+ or req.satisfied_by.in_usersite) |
||||||
|
+ and req.satisfied_by.in_install_path): |
||||||
|
req.should_reinstall = True |
||||||
|
req.satisfied_by = None |
||||||
|
|
||||||
|
diff --git a/src/pip/_internal/resolution/resolvelib/factory.py b/src/pip/_internal/resolution/resolvelib/factory.py |
||||||
|
index a4c24b5..e7e2da9 100644 |
||||||
|
--- a/src/pip/_internal/resolution/resolvelib/factory.py |
||||||
|
+++ b/src/pip/_internal/resolution/resolvelib/factory.py |
||||||
|
@@ -1,6 +1,8 @@ |
||||||
|
import contextlib |
||||||
|
import functools |
||||||
|
import logging |
||||||
|
+import sys |
||||||
|
+import sysconfig |
||||||
|
from typing import ( |
||||||
|
TYPE_CHECKING, |
||||||
|
Dict, |
||||||
|
@@ -549,6 +551,16 @@ class Factory: |
||||||
|
if dist is None: # Not installed, no uninstallation required. |
||||||
|
return None |
||||||
|
|
||||||
|
+ # Prevent uninstalling packages from /usr |
||||||
|
+ try: |
||||||
|
+ if dist.installed_location in ( |
||||||
|
+ sysconfig.get_path('purelib', scheme='rpm_prefix', vars={'base': sys.base_prefix}), |
||||||
|
+ sysconfig.get_path('platlib', scheme='rpm_prefix', vars={'base': sys.base_prefix}), |
||||||
|
+ ): |
||||||
|
+ return None |
||||||
|
+ except KeyError: # this Python doesn't have 'rpm_prefix' scheme yet |
||||||
|
+ pass |
||||||
|
+ |
||||||
|
# We're installing into global site. The current installation must |
||||||
|
# be uninstalled, no matter it's in global or user site, because the |
||||||
|
# user site installation has precedence over global. |
||||||
|
-- |
||||||
|
2.35.3 |
||||||
|
|
@ -0,0 +1,11 @@ |
|||||||
|
../../../bin/__pycache__/tldr.cpython-37.pyc,, |
||||||
|
../../../bin/tldr,sha256=6MUiLCWhldmV8OelT2dvPgS7q5GFwuhvd6th0Bb-LH4,12766 |
||||||
|
../../../bin/tldr.py,sha256=6MUiLCWhldmV8OelT2dvPgS7q5GFwuhvd6th0Bb-LH4,12766 |
||||||
|
__pycache__/tldr.cpython-37.pyc,, |
||||||
|
tldr-0.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 |
||||||
|
tldr-0.5.dist-info/LICENSE,sha256=q7quAfjDWCYKC_WRk_uaP6d2wwVpOpVjUSkv8l6H7xI,1075 |
||||||
|
tldr-0.5.dist-info/METADATA,sha256=AN5nYUVxo_zkVaMGKu34YDWWif84oA6uxKmTab213vM,3850 |
||||||
|
tldr-0.5.dist-info/RECORD,, |
||||||
|
tldr-0.5.dist-info/WHEEL,sha256=S8S5VL-stOTSZDYxHyf0KP7eds0J72qrK0Evu3TfyAY,92 |
||||||
|
tldr-0.5.dist-info/top_level.txt,sha256=xHSI9WD6Y-_hONbi2b_9RIn9oiO7RBGHU3A8geJq3mI,5 |
||||||
|
tldr.py,sha256=aJlA3tIz4QYYy8e7DZUhPyLCqTwnfFjA7Nubwm9bPe0,12779 |
@ -0,0 +1,85 @@ |
|||||||
|
from pathlib import Path |
||||||
|
import importlib.metadata |
||||||
|
|
||||||
|
import pytest |
||||||
|
import yaml |
||||||
|
|
||||||
|
from pyproject_buildrequires import generate_requires |
||||||
|
|
||||||
|
|
||||||
|
testcases = {} |
||||||
|
with Path(__file__).parent.joinpath('pyproject_buildrequires_testcases.yaml').open() as f: |
||||||
|
testcases = yaml.safe_load(f) |
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('case_name', testcases) |
||||||
|
def test_data(case_name, capfd, tmp_path, monkeypatch): |
||||||
|
case = testcases[case_name] |
||||||
|
|
||||||
|
cwd = tmp_path.joinpath('cwd') |
||||||
|
cwd.mkdir() |
||||||
|
monkeypatch.chdir(cwd) |
||||||
|
wheeldir = cwd.joinpath('wheeldir') |
||||||
|
wheeldir.mkdir() |
||||||
|
|
||||||
|
if case.get('xfail'): |
||||||
|
pytest.xfail(case.get('xfail')) |
||||||
|
|
||||||
|
for filename in case: |
||||||
|
file_types = ('.toml', '.py', '.in', '.ini', '.txt') |
||||||
|
if filename.endswith(file_types): |
||||||
|
cwd.joinpath(filename).write_text(case[filename]) |
||||||
|
|
||||||
|
for name, value in case.get('environ', {}).items(): |
||||||
|
monkeypatch.setenv(name, value) |
||||||
|
|
||||||
|
def get_installed_version(dist_name): |
||||||
|
try: |
||||||
|
return str(case['installed'][dist_name]) |
||||||
|
except (KeyError, TypeError): |
||||||
|
raise importlib.metadata.PackageNotFoundError( |
||||||
|
f'info not found for {dist_name}' |
||||||
|
) |
||||||
|
requirement_files = case.get('requirement_files', []) |
||||||
|
requirement_files = [open(f) for f in requirement_files] |
||||||
|
use_build_system = case.get('use_build_system', True) |
||||||
|
try: |
||||||
|
generate_requires( |
||||||
|
get_installed_version=get_installed_version, |
||||||
|
include_runtime=case.get('include_runtime', use_build_system), |
||||||
|
build_wheel=case.get('build_wheel', False), |
||||||
|
wheeldir=str(wheeldir), |
||||||
|
extras=case.get('extras', []), |
||||||
|
toxenv=case.get('toxenv', None), |
||||||
|
generate_extras=case.get('generate_extras', False), |
||||||
|
requirement_files=requirement_files, |
||||||
|
use_build_system=use_build_system, |
||||||
|
) |
||||||
|
except SystemExit as e: |
||||||
|
assert e.code == case['result'] |
||||||
|
except Exception as e: |
||||||
|
if 'except' not in case: |
||||||
|
raise |
||||||
|
assert type(e).__name__ == case['except'] |
||||||
|
else: |
||||||
|
assert 0 == case['result'] |
||||||
|
|
||||||
|
# this prevents us from accidentally writing "empty" tests |
||||||
|
# if we ever need to do that, we can remove the check or change it: |
||||||
|
assert 'expected' in case or 'stderr_contains' in case |
||||||
|
|
||||||
|
out, err = capfd.readouterr() |
||||||
|
|
||||||
|
if 'expected' in case: |
||||||
|
assert out == case['expected'] |
||||||
|
|
||||||
|
# stderr_contains may be a string or list of strings |
||||||
|
stderr_contains = case.get('stderr_contains') |
||||||
|
if stderr_contains is not None: |
||||||
|
if isinstance(stderr_contains, str): |
||||||
|
stderr_contains = [stderr_contains] |
||||||
|
for expected_substring in stderr_contains: |
||||||
|
assert expected_substring.format(**locals()) in err |
||||||
|
finally: |
||||||
|
for req in requirement_files: |
||||||
|
req.close() |
@ -0,0 +1,78 @@ |
|||||||
|
from pathlib import Path |
||||||
|
from textwrap import dedent |
||||||
|
|
||||||
|
from pyproject_requirements_txt import convert_requirements_txt |
||||||
|
|
||||||
|
|
||||||
|
def test_requirements_add_pkgname(): |
||||||
|
reqs_txt = dedent(r""" |
||||||
|
good@git+https://github.com/monty/spam.git@master#egg=bad |
||||||
|
git+https://github.com/monty/spam.git@master#egg=ugly |
||||||
|
https://example.com/undead.tar.gz#egg=undead ; python_version > 3.0 |
||||||
|
""") |
||||||
|
result = convert_requirements_txt(reqs_txt.splitlines()) |
||||||
|
|
||||||
|
expected = [ |
||||||
|
'good@git+https://github.com/monty/spam.git@master#egg=bad', |
||||||
|
'ugly@git+https://github.com/monty/spam.git@master#egg=ugly', |
||||||
|
'undead@https://example.com/undead.tar.gz#egg=undead ; python_version > 3.0', |
||||||
|
] |
||||||
|
assert result == expected |
||||||
|
|
||||||
|
|
||||||
|
def test_requirements_preprocess(monkeypatch): |
||||||
|
reqs_txt = dedent(r""" |
||||||
|
Normal_Req ~= 1.2.0 |
||||||
|
whitespace-stripped < 3 <END> |
||||||
|
|
||||||
|
# indentation is preserved in continuations: |
||||||
|
foo <=\ |
||||||
|
30 |
||||||
|
bar<= \ |
||||||
|
30 |
||||||
|
# names and operators can be split: |
||||||
|
this-was-\ |
||||||
|
too-long<\ |
||||||
|
=30 |
||||||
|
|
||||||
|
# this is not a multi-line comment \ |
||||||
|
some-dep |
||||||
|
# neither is this \ |
||||||
|
other-dep |
||||||
|
another-dep # but this *is* a multi-line coment \ |
||||||
|
so any garbage can be here |
||||||
|
dep-a # and this comment ends with the blank line below \ |
||||||
|
|
||||||
|
dep-b |
||||||
|
${ENVVAR} |
||||||
|
whitespace-stripped-before-substitution ${SPACE} |
||||||
|
${MISSING_ENVVAR} |
||||||
|
""".replace('<END>', '')) |
||||||
|
monkeypatch.setenv('ENVVAR', 'package-from-env') |
||||||
|
monkeypatch.setenv('SPACE', ' ') |
||||||
|
monkeypatch.delenv('MISSING_ENVVAR', raising=False) |
||||||
|
result = convert_requirements_txt(reqs_txt.splitlines()) |
||||||
|
|
||||||
|
expected = [ |
||||||
|
'Normal_Req ~= 1.2.0', |
||||||
|
'whitespace-stripped < 3', |
||||||
|
'foo <= 30', |
||||||
|
'bar<= 30', |
||||||
|
'this-was-too-long<=30', |
||||||
|
'some-dep', |
||||||
|
'other-dep', |
||||||
|
'another-dep', |
||||||
|
'dep-a', |
||||||
|
'dep-b', |
||||||
|
'package-from-env', |
||||||
|
'whitespace-stripped-before-substitution ', |
||||||
|
'${MISSING_ENVVAR}', |
||||||
|
] |
||||||
|
#result = expected |
||||||
|
assert result == expected |
||||||
|
|
||||||
|
# This test uses pip internals, so it might break in the future. |
||||||
|
from pip._internal.req.req_file import preprocess |
||||||
|
expected = [line for lineno, line in preprocess(reqs_txt)] |
||||||
|
assert result == expected |
||||||
|
|
@ -0,0 +1,263 @@ |
|||||||
|
import pytest |
||||||
|
import yaml |
||||||
|
|
||||||
|
from pathlib import Path |
||||||
|
from pprint import pprint |
||||||
|
|
||||||
|
from pyproject_preprocess_record import parse_record, read_record, save_parsed_record |
||||||
|
|
||||||
|
from pyproject_save_files import argparser, generate_file_list, BuildrootPath |
||||||
|
from pyproject_save_files import main as save_files_main |
||||||
|
from pyproject_save_files import module_names_from_path |
||||||
|
|
||||||
|
DIR = Path(__file__).parent |
||||||
|
PREFIX = Path("/usr") |
||||||
|
BINDIR = BuildrootPath("/usr/bin") |
||||||
|
DATADIR = BuildrootPath("/usr/share") |
||||||
|
SITELIB = BuildrootPath("/usr/lib/python3.7/site-packages") |
||||||
|
SITEARCH = BuildrootPath("/usr/lib64/python3.7/site-packages") |
||||||
|
|
||||||
|
yaml_file = DIR / "pyproject_save_files_test_data.yaml" |
||||||
|
yaml_data = yaml.safe_load(yaml_file.read_text()) |
||||||
|
EXPECTED_DICT = yaml_data["classified"] |
||||||
|
EXPECTED_FILES = yaml_data["dumped"] |
||||||
|
TEST_RECORDS = yaml_data["records"] |
||||||
|
TEST_METADATAS = yaml_data["metadata"] |
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture |
||||||
|
def tldr_root(tmp_path): |
||||||
|
prepare_pyproject_record(tmp_path, package="tldr") |
||||||
|
return tmp_path |
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture |
||||||
|
def pyproject_record(tmp_path): |
||||||
|
return tmp_path / "pyproject-record" |
||||||
|
|
||||||
|
|
||||||
|
def prepare_pyproject_record(tmp_path, package=None, content=None): |
||||||
|
""" |
||||||
|
Creates RECORD from test data and then uses |
||||||
|
functions from pyproject_process_record to convert |
||||||
|
it to pyproject-record file which is then |
||||||
|
further processed by functions from pyproject_save_files. |
||||||
|
""" |
||||||
|
record_file = tmp_path / "RECORD" |
||||||
|
pyproject_record = tmp_path / "pyproject-record" |
||||||
|
|
||||||
|
if package is not None: |
||||||
|
# Get test data and write dist-info/RECORD file |
||||||
|
record_path = BuildrootPath(TEST_RECORDS[package]["path"]) |
||||||
|
record_file.write_text(TEST_RECORDS[package]["content"]) |
||||||
|
if package in TEST_METADATAS: |
||||||
|
metadata_path = BuildrootPath(TEST_METADATAS[package]["path"]).to_real(tmp_path) |
||||||
|
metadata_path.parent.mkdir(parents=True, exist_ok=True) |
||||||
|
metadata_path.write_text(TEST_METADATAS[package]["content"]) |
||||||
|
# Parse RECORD file |
||||||
|
parsed_record = parse_record(record_path, read_record(record_file)) |
||||||
|
# Save JSON content to pyproject-record |
||||||
|
save_parsed_record(record_path, parsed_record, pyproject_record) |
||||||
|
elif content is not None: |
||||||
|
save_parsed_record(*content, output_file=pyproject_record) |
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture |
||||||
|
def output_files(tmp_path): |
||||||
|
return tmp_path / "pyproject_files" |
||||||
|
|
||||||
|
@pytest.fixture |
||||||
|
def output_modules(tmp_path): |
||||||
|
return tmp_path / "pyproject_modules" |
||||||
|
|
||||||
|
|
||||||
|
def test_parse_record_tldr(): |
||||||
|
record_path = BuildrootPath(TEST_RECORDS["tldr"]["path"]) |
||||||
|
record_content = read_record(DIR / "test_RECORD") |
||||||
|
output = list(parse_record(record_path, record_content)) |
||||||
|
pprint(output) |
||||||
|
expected = [ |
||||||
|
str(BINDIR / "__pycache__/tldr.cpython-37.pyc"), |
||||||
|
str(BINDIR / "tldr"), |
||||||
|
str(BINDIR / "tldr.py"), |
||||||
|
str(SITELIB / "__pycache__/tldr.cpython-37.pyc"), |
||||||
|
str(SITELIB / "tldr-0.5.dist-info/INSTALLER"), |
||||||
|
str(SITELIB / "tldr-0.5.dist-info/LICENSE"), |
||||||
|
str(SITELIB / "tldr-0.5.dist-info/METADATA"), |
||||||
|
str(SITELIB / "tldr-0.5.dist-info/RECORD"), |
||||||
|
str(SITELIB / "tldr-0.5.dist-info/WHEEL"), |
||||||
|
str(SITELIB / "tldr-0.5.dist-info/top_level.txt"), |
||||||
|
str(SITELIB / "tldr.py"), |
||||||
|
] |
||||||
|
assert output == expected |
||||||
|
|
||||||
|
|
||||||
|
def test_parse_record_tensorflow(): |
||||||
|
long = "tensorflow_core/include/tensorflow/core/common_runtime/base_collective_executor.h" |
||||||
|
record_path = SITEARCH / "tensorflow-2.1.0.dist-info/RECORD" |
||||||
|
record_content = [ |
||||||
|
["../../../bin/toco_from_protos", "sha256=hello", "289"], |
||||||
|
[f"../../../lib/python3.7/site-packages/{long}", "sha256=darkness", "1024"], |
||||||
|
["tensorflow-2.1.0.dist-info/METADATA", "sha256=friend", "2859"], |
||||||
|
] |
||||||
|
output = list(parse_record(record_path, record_content)) |
||||||
|
pprint(output) |
||||||
|
expected = [ |
||||||
|
str(BINDIR / "toco_from_protos"), |
||||||
|
str(SITELIB / long), |
||||||
|
str(SITEARCH / "tensorflow-2.1.0.dist-info/METADATA"), |
||||||
|
] |
||||||
|
assert output == expected |
||||||
|
|
||||||
|
|
||||||
|
def remove_others(expected): |
||||||
|
return [ |
||||||
|
p for p in expected |
||||||
|
if not ( |
||||||
|
p.startswith(str(BINDIR)) or |
||||||
|
p.endswith(".pth") or |
||||||
|
p.endswith("*") or |
||||||
|
p.rpartition(' ')[-1].startswith(str(DATADIR)) |
||||||
|
) |
||||||
|
] |
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("include_auto", (True, False)) |
||||||
|
@pytest.mark.parametrize("package, glob, expected_files, expected_modules", EXPECTED_FILES) |
||||||
|
def test_generate_file_list(package, glob, expected_files, include_auto, expected_modules): |
||||||
|
paths_dict = EXPECTED_DICT[package] |
||||||
|
modules_glob = {glob} |
||||||
|
if not include_auto: |
||||||
|
expected_files = remove_others(expected_files) |
||||||
|
tested = generate_file_list(paths_dict, modules_glob, include_auto) |
||||||
|
|
||||||
|
assert tested == expected_files |
||||||
|
|
||||||
|
|
||||||
|
def test_generate_file_list_unused_glob(): |
||||||
|
paths_dict = EXPECTED_DICT["kerberos"] |
||||||
|
modules_glob = {"kerberos", "unused_glob1", "unused_glob2", "kerb*"} |
||||||
|
with pytest.raises(ValueError) as excinfo: |
||||||
|
generate_file_list(paths_dict, modules_glob, True) |
||||||
|
|
||||||
|
assert "unused_glob1, unused_glob2" in str(excinfo.value) |
||||||
|
assert "kerb" not in str(excinfo.value) |
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize( |
||||||
|
"path, expected", |
||||||
|
[ |
||||||
|
("foo/bar/baz.py", {"foo", "foo.bar", "foo.bar.baz"}), |
||||||
|
("foo/bar.py", {"foo", "foo.bar"}), |
||||||
|
("foo.py", {"foo"}), |
||||||
|
("foo/bar.so.2", set()), |
||||||
|
("foo.cpython-37m-x86_64-linux-gnu.so", {"foo"}), |
||||||
|
("foo/_api/v2/__init__.py", set()), |
||||||
|
("foo/__init__.py", {"foo"}), |
||||||
|
("foo/_priv.py", set()), |
||||||
|
("foo/_bar/lib.so", set()), |
||||||
|
("foo/bar/baz.so", {"foo", "foo.bar", "foo.bar.baz"}), |
||||||
|
("foo/bar/baz.pth", set()), |
||||||
|
("foo/bar/baz.pyc", set()), |
||||||
|
("def.py", set()), |
||||||
|
("foo-bar/baz.py", set()), |
||||||
|
("foobar/12baz.py", set()), |
||||||
|
("foo/\nbar/baz.py", set()), |
||||||
|
("foo/+bar/baz.py", set()), |
||||||
|
("foo/__init__.cpython-39-x86_64-linux-gnu.so", {"foo"}), |
||||||
|
("foo/bar/__pycache__/abc.cpython-37.pyc", set()), |
||||||
|
], |
||||||
|
) |
||||||
|
def test_module_names_from_path(path, expected): |
||||||
|
tested = Path(path) |
||||||
|
assert module_names_from_path(tested) == expected |
||||||
|
|
||||||
|
|
||||||
|
def default_options(output_files, output_modules, mock_root, pyproject_record): |
||||||
|
return [ |
||||||
|
"--output-files", |
||||||
|
str(output_files), |
||||||
|
"--output-modules", |
||||||
|
str(output_modules), |
||||||
|
"--buildroot", |
||||||
|
str(mock_root), |
||||||
|
"--sitelib", |
||||||
|
str(SITELIB), |
||||||
|
"--sitearch", |
||||||
|
str(SITEARCH), |
||||||
|
"--python-version", |
||||||
|
"3.7", # test data are for 3.7, |
||||||
|
"--pyproject-record", |
||||||
|
str(pyproject_record), |
||||||
|
"--prefix", |
||||||
|
str(PREFIX), |
||||||
|
] |
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("include_auto", (True, False)) |
||||||
|
@pytest.mark.parametrize("package, glob, expected_files, expected_modules", EXPECTED_FILES) |
||||||
|
def test_cli(tmp_path, package, glob, expected_files, expected_modules, include_auto, pyproject_record): |
||||||
|
prepare_pyproject_record(tmp_path, package) |
||||||
|
output_files = tmp_path / "files" |
||||||
|
output_modules = tmp_path / "modules" |
||||||
|
globs = [glob, "+auto"] if include_auto else [glob] |
||||||
|
cli_args = argparser().parse_args([*default_options(output_files, output_modules, tmp_path, pyproject_record), *globs]) |
||||||
|
save_files_main(cli_args) |
||||||
|
|
||||||
|
if not include_auto: |
||||||
|
expected_files = remove_others(expected_files) |
||||||
|
tested_files = output_files.read_text() |
||||||
|
assert tested_files == "\n".join(expected_files) + "\n" |
||||||
|
|
||||||
|
tested_modules = output_modules.read_text().split() |
||||||
|
|
||||||
|
assert tested_modules == expected_modules |
||||||
|
|
||||||
|
|
||||||
|
def test_cli_no_pyproject_record(tmp_path, pyproject_record): |
||||||
|
output_files = tmp_path / "files" |
||||||
|
output_modules = tmp_path / "modules" |
||||||
|
cli_args = argparser().parse_args([*default_options(output_files, output_modules, tmp_path, pyproject_record), "tldr*"]) |
||||||
|
|
||||||
|
with pytest.raises(FileNotFoundError): |
||||||
|
save_files_main(cli_args) |
||||||
|
|
||||||
|
|
||||||
|
def test_cli_too_many_RECORDS(tldr_root, output_files, output_modules, pyproject_record): |
||||||
|
# Two calls to simulate how %pyproject_install process more than one RECORD file |
||||||
|
prepare_pyproject_record(tldr_root, |
||||||
|
content=("foo/bar/dist-info/RECORD", [])) |
||||||
|
prepare_pyproject_record(tldr_root, |
||||||
|
content=("foo/baz/dist-info/RECORD", [])) |
||||||
|
cli_args = argparser().parse_args([*default_options(output_files, output_modules, tldr_root, pyproject_record), "tldr*"]) |
||||||
|
|
||||||
|
with pytest.raises(FileExistsError): |
||||||
|
save_files_main(cli_args) |
||||||
|
|
||||||
|
|
||||||
|
def test_cli_bad_argument(tldr_root, output_files, output_modules, pyproject_record): |
||||||
|
cli_args = argparser().parse_args( |
||||||
|
[*default_options(output_files, output_modules, tldr_root, pyproject_record), "tldr*", "+foodir"] |
||||||
|
) |
||||||
|
|
||||||
|
with pytest.raises(ValueError): |
||||||
|
save_files_main(cli_args) |
||||||
|
|
||||||
|
|
||||||
|
def test_cli_bad_option(tldr_root, output_files, output_modules, pyproject_record): |
||||||
|
prepare_pyproject_record(tldr_root.parent, content=("RECORD1", [])) |
||||||
|
cli_args = argparser().parse_args( |
||||||
|
[*default_options(output_files, output_modules, tldr_root, pyproject_record), "tldr*", "you_cannot_have_this"] |
||||||
|
) |
||||||
|
|
||||||
|
with pytest.raises(ValueError): |
||||||
|
save_files_main(cli_args) |
||||||
|
|
||||||
|
|
||||||
|
def test_cli_bad_namespace(tldr_root, output_files, output_modules, pyproject_record): |
||||||
|
cli_args = argparser().parse_args( |
||||||
|
[*default_options(output_files, output_modules, tldr_root, pyproject_record), "tldr.didntread"] |
||||||
|
) |
||||||
|
|
||||||
|
with pytest.raises(ValueError): |
||||||
|
save_files_main(cli_args) |
@ -0,0 +1,375 @@ |
|||||||
|
%define _rpmmacrodir /usr/lib/rpm/macros.d/ |
||||||
|
|
||||||
|
Name: pyproject-rpm-macros |
||||||
|
Summary: RPM macros for PEP 517 Python packages |
||||||
|
License: MIT |
||||||
|
|
||||||
|
%bcond_with tests |
||||||
|
|
||||||
|
# The idea is to follow the spirit of semver |
||||||
|
# Given version X.Y.Z: |
||||||
|
# Increment X and reset Y.Z when there is a *major* incompatibility |
||||||
|
# Increment Y and reset Z when new macros or features are added |
||||||
|
# Increment Z when this is a bugfix or a cosmetic change |
||||||
|
# Dropping support for EOL Fedoras is *not* considered a breaking change |
||||||
|
Version: 1.3.2 |
||||||
|
Release: 1%{?dist} |
||||||
|
|
||||||
|
# Macro files |
||||||
|
Source001: macros.pyproject |
||||||
|
|
||||||
|
# Implementation files |
||||||
|
Source101: pyproject_buildrequires.py |
||||||
|
Source102: pyproject_save_files.py |
||||||
|
Source103: pyproject_convert.py |
||||||
|
Source104: pyproject_preprocess_record.py |
||||||
|
Source105: pyproject_construct_toxenv.py |
||||||
|
Source106: pyproject_requirements_txt.py |
||||||
|
Source107: pyproject_wheel.py |
||||||
|
|
||||||
|
# Tests |
||||||
|
Source201: test_pyproject_buildrequires.py |
||||||
|
Source202: test_pyproject_save_files.py |
||||||
|
Source203: test_pyproject_requirements_txt.py |
||||||
|
Source204: compare_mandata.py |
||||||
|
|
||||||
|
# Test data |
||||||
|
Source301: pyproject_buildrequires_testcases.yaml |
||||||
|
Source302: pyproject_save_files_test_data.yaml |
||||||
|
Source303: test_RECORD |
||||||
|
|
||||||
|
# Metadata |
||||||
|
Source901: README.md |
||||||
|
Source902: LICENSE |
||||||
|
|
||||||
|
URL: https://src.fedoraproject.org/rpms/pyproject-rpm-macros |
||||||
|
|
||||||
|
BuildArch: noarch |
||||||
|
|
||||||
|
%if %{with tests} |
||||||
|
BuildRequires: python3dist(pytest) |
||||||
|
BuildRequires: python3dist(pyyaml) |
||||||
|
BuildRequires: python3dist(packaging) |
||||||
|
BuildRequires: python3dist(pip) |
||||||
|
BuildRequires: python3dist(setuptools) |
||||||
|
BuildRequires: python3dist(tox-current-env) >= 0.0.6 |
||||||
|
BuildRequires: python3dist(wheel) |
||||||
|
#BuildRequires: (python3dist(toml) if python3-devel < 3.11) |
||||||
|
%endif |
||||||
|
|
||||||
|
# We build on top of those: |
||||||
|
Requires: python-rpm-macros |
||||||
|
Requires: python-srpm-macros |
||||||
|
Requires: python3-rpm-macros |
||||||
|
|
||||||
|
# We use the following tools outside of coreutils |
||||||
|
Requires: /usr/bin/find |
||||||
|
Requires: /usr/bin/sed |
||||||
|
|
||||||
|
%description |
||||||
|
These macros allow projects that follow the Python packaging specifications |
||||||
|
to be packaged as RPMs. |
||||||
|
|
||||||
|
They work for: |
||||||
|
|
||||||
|
* traditional Setuptools-based projects that use the setup.py file, |
||||||
|
* newer Setuptools-based projects that have a setup.cfg file, |
||||||
|
* general Python projects that use the PEP 517 pyproject.toml file |
||||||
|
(which allows using any build system, such as setuptools, flit or poetry). |
||||||
|
|
||||||
|
These macros replace %%py3_build and %%py3_install, |
||||||
|
which only work with setup.py. |
||||||
|
|
||||||
|
|
||||||
|
%prep |
||||||
|
# Not strictly necessary but allows working on file names instead |
||||||
|
# of source numbers in install section |
||||||
|
%setup -c -T |
||||||
|
cp -p %{sources} . |
||||||
|
|
||||||
|
%build |
||||||
|
# nothing to do, sources are not buildable |
||||||
|
|
||||||
|
%install |
||||||
|
mkdir -p %{buildroot}%{_rpmmacrodir} |
||||||
|
mkdir -p %{buildroot}%{_rpmconfigdir}/redhat |
||||||
|
install -m 644 macros.pyproject %{buildroot}%{_rpmmacrodir}/ |
||||||
|
install -m 644 pyproject_buildrequires.py %{buildroot}%{_rpmconfigdir}/redhat/ |
||||||
|
install -m 644 pyproject_convert.py %{buildroot}%{_rpmconfigdir}/redhat/ |
||||||
|
install -m 644 pyproject_save_files.py %{buildroot}%{_rpmconfigdir}/redhat/ |
||||||
|
install -m 644 pyproject_preprocess_record.py %{buildroot}%{_rpmconfigdir}/redhat/ |
||||||
|
install -m 644 pyproject_construct_toxenv.py %{buildroot}%{_rpmconfigdir}/redhat/ |
||||||
|
install -m 644 pyproject_requirements_txt.py %{buildroot}%{_rpmconfigdir}/redhat/ |
||||||
|
install -m 644 pyproject_wheel.py %{buildroot}%{_rpmconfigdir}/redhat/ |
||||||
|
|
||||||
|
%if %{with tests} |
||||||
|
%check |
||||||
|
export HOSTNAME="rpmbuild" # to speedup tox in network-less mock, see rhbz#1856356 |
||||||
|
%{python3} -m pytest -vv --doctest-modules |
||||||
|
|
||||||
|
# brp-compress is provided as an argument to get the right directory macro expansion |
||||||
|
%{python3} compare_mandata.py -f %{_rpmconfigdir}/brp-compress |
||||||
|
%endif |
||||||
|
|
||||||
|
|
||||||
|
%files |
||||||
|
%{_rpmmacrodir}/macros.pyproject |
||||||
|
%{_rpmconfigdir}/redhat/pyproject_buildrequires.py* |
||||||
|
%{_rpmconfigdir}/redhat/pyproject_convert.py* |
||||||
|
%{_rpmconfigdir}/redhat/pyproject_save_files.py* |
||||||
|
%{_rpmconfigdir}/redhat/pyproject_preprocess_record.py* |
||||||
|
%{_rpmconfigdir}/redhat/pyproject_construct_toxenv.py* |
||||||
|
%{_rpmconfigdir}/redhat/pyproject_requirements_txt.py* |
||||||
|
%{_rpmconfigdir}/redhat/pyproject_wheel.py* |
||||||
|
|
||||||
|
%doc README.md |
||||||
|
%license LICENSE |
||||||
|
|
||||||
|
%changelog |
||||||
|
* Wed Jun 15 2022 Benjamin A. Beasley <code@musicinmybrain.net> - 1.3.2-1 |
||||||
|
- Update %%pyproject_build_lib to support setuptools 62.1.0 and later |
||||||
|
- Fixes: rhbz#2097158 |
||||||
|
- %%pyproject_buildrequires: When extension modules are built, |
||||||
|
support https://fedoraproject.org/wiki/Changes/Package_information_on_ELF_objects |
||||||
|
- Fixes: rhbz#2097535 |
||||||
|
|
||||||
|
* Fri May 27 2022 Owen Taylor <otaylor@redhat.com> - 1.3.1-1 |
||||||
|
- %%pyproject_install: pass %%{_prefix} explicitly to pip install |
||||||
|
|
||||||
|
* Thu May 12 2022 Miro Hrončok <mhroncok@redhat.com> - 1.3.0-1 |
||||||
|
- Use tomllib from the standard library on Python 3.11+ |
||||||
|
|
||||||
|
* Wed Apr 27 2022 Miro Hrončok <mhroncok@redhat.com> - 1.2.0-1 |
||||||
|
- %%pyproject_buildrequires: Add provisional -w flag for build backends without |
||||||
|
prepare_metadata_for_build_wheel hook |
||||||
|
When used, the wheel is built in %%pyproject_buildrequires |
||||||
|
and information about runtime requires and extras is read from that wheel. |
||||||
|
- Fixes: rhbz#2076994 |
||||||
|
|
||||||
|
* Tue Apr 12 2022 Miro Hrončok <mhroncok@redhat.com> - 1.1.0-1 |
||||||
|
- %%pyproject_save_files: Support nested directories in dist-info |
||||||
|
- Fixes: rhbz#1985340 |
||||||
|
|
||||||
|
* Tue Mar 22 2022 Miro Hrončok <mhroncok@redhat.com> - 1.0.1-1 |
||||||
|
- Prefix paths of intermediate files (such as %%{pyproject_files}) with NVRA |
||||||
|
|
||||||
|
* Tue Mar 01 2022 Miro Hrončok <mhroncok@redhat.com> - 1.0.0-1 |
||||||
|
- Release final version 1.0.0 |
||||||
|
|
||||||
|
* Mon Feb 07 2022 Lumír Balhar <lbalhar@redhat.com> - 1.0.0~rc2-1 |
||||||
|
- Updated compatibility with tox4 |
||||||
|
|
||||||
|
* Tue Jan 25 2022 Miro Hrončok <mhroncok@redhat.com> - 1.0.0~rc1-1 |
||||||
|
- Release version 1.0.0, first release candidate |
||||||
|
|
||||||
|
* Mon Jan 24 2022 Miro Hrončok <mhroncok@redhat.com> - 0-55 |
||||||
|
- %%pyproject_buildrequires: Generate BuildRequires for this package |
||||||
|
This package is already installed, but this way, the resulting SRPM explicitly BuildRequires it |
||||||
|
|
||||||
|
* Wed Jan 19 2022 Karolina Surma <ksurma@redhat.com> - 0-54 |
||||||
|
- Include compressed manpages to the package if flag '+auto' is provided to %%pyproject_save_files |
||||||
|
- Fixes: rhbz#2033254 |
||||||
|
|
||||||
|
* Fri Jan 14 2022 Miro Hrončok <mhroncok@redhat.com> - 0-53 |
||||||
|
- %%pyproject_buildrequires: Make -r (include runtime) the default, use -R to opt-out |
||||||
|
|
||||||
|
* Sun Dec 19 2021 Gordon Messmer <gordon.messmer@gmail.com> - 0-52 |
||||||
|
- Handle legacy version specifiers that would previously raise exceptions. |
||||||
|
|
||||||
|
* Wed Dec 08 2021 Miro Hrončok <mhroncok@redhat.com> - 0-51 |
||||||
|
- Define provisional %%pyproject_build_lib |
||||||
|
|
||||||
|
* Mon Nov 1 2021 Gordon Messmer <gordon.messmer@gmail.com> - 0-50 |
||||||
|
- Improve handling of > operator, preventing post-release from satisfying most rpm requirements |
||||||
|
- Improve handling of < operator, preventing pre-release from satisfying rpm requirement |
||||||
|
- Improve handling of != operator with prefix matching, preventing pre-release from satisfying rpm requirements |
||||||
|
|
||||||
|
* Tue Oct 19 2021 Karolina Surma <ksurma@redhat.com> - 0-49 |
||||||
|
- %%pyproject_save_files: Save %%_pyproject_modules file with importable module names |
||||||
|
- Introduce %%pyproject_check_import which passes %%_pyproject_modules to %%py3_check_import |
||||||
|
- Introduce -t, -e filtering options to %%pyproject_check_import |
||||||
|
|
||||||
|
* Sat Oct 16 2021 Miro Hrončok <mhroncok@redhat.com> - 0-48 |
||||||
|
- %%pyproject_buildrequires: Accept installed pre-releases for all requirements |
||||||
|
- Fixes: rhbz#2014639 |
||||||
|
|
||||||
|
* Thu Sep 09 2021 Miro Hrončok <mhroncok@redhat.com> - 0-47 |
||||||
|
- %%pyproject_save_files: Expand the namespace error message, also display it with / |
||||||
|
- %%pyproject_save_files: Add a workaround error for spaces and [brackets] |
||||||
|
|
||||||
|
* Fri Jul 23 2021 Miro Hrončok <miro@hroncok.cz> - 0-46 |
||||||
|
- %%pyproject_buildrequires now fails when it encounters an invalid requirement |
||||||
|
- Fixes: rhbz#1983053 |
||||||
|
- Rename %%_pyproject_ghost_distinfo and %%_pyproject_record to indicate they are private |
||||||
|
- Automatically detect LICENSE files and mark them with %%license macro |
||||||
|
|
||||||
|
* Fri Jul 23 2021 Fedora Release Engineering <releng@fedoraproject.org> - 0-45 |
||||||
|
- Rebuilt for https://fedoraproject.org/wiki/Fedora_35_Mass_Rebuild |
||||||
|
|
||||||
|
* Fri Jul 09 2021 Python Maint <python-maint@redhat.com> - 0-44 |
||||||
|
- Escape weird paths generated by %%pyproject_save_files |
||||||
|
- Fixes rhbz#1976363 |
||||||
|
- Support x.* versions in %%pyproject_buildrequires |
||||||
|
- Fixes rhbz#1981558 |
||||||
|
- %%pyproject_buildrequires fallbacks to setuptools only if setup.py exists |
||||||
|
- Fixes: rhbz#1976459 |
||||||
|
- Explicitly require the "basic" Python RPM macros |
||||||
|
|
||||||
|
* Thu Jul 01 2021 Tomas Hrnciar <thrnciar@redhat.com> - 0-43 |
||||||
|
- Generate BuildRequires from file |
||||||
|
- Fixes: rhbz#1936448 |
||||||
|
|
||||||
|
* Tue Jun 29 2021 Miro Hrončok <mhroncok@redhat.com> - 0-42 |
||||||
|
- Don't accidentally treat "~= X.0" requirement as "~= X" |
||||||
|
- Fixes rhbz#1977060 |
||||||
|
|
||||||
|
* Mon Jun 28 2021 Miro Hrončok <mhroncok@redhat.com> - 0-41 |
||||||
|
- Don't leak %%{_pyproject_builddir} to pytest collection |
||||||
|
- Fixes rhbz#1935212 |
||||||
|
|
||||||
|
* Thu May 27 2021 Miro Hrončok <mhroncok@redhat.com> - 0-40 |
||||||
|
- Don't leak $TMPDIR outside of pyproject macros |
||||||
|
- Set %%_pyproject_wheeldir and %%_pyproject_builddir relative to the source tree, not $PWD |
||||||
|
|
||||||
|
* Mon Mar 29 2021 Miro Hrončok <mhroncok@redhat.com> - 0-39 |
||||||
|
- Handle tox provision (tox.requires / tox.minversion) |
||||||
|
- Fixes: rhbz#1922495 |
||||||
|
- Generate BuildRequires on extras in lower case |
||||||
|
- Fixes: rhbz#1937944 |
||||||
|
|
||||||
|
* Sun Feb 07 2021 Miro Hrončok <mhroncok@redhat.com> - 0-38 |
||||||
|
- Include nested __pycache__ directories in %%pyproject_save_files |
||||||
|
- Fixes: rhbz#1925963 |
||||||
|
|
||||||
|
* Tue Feb 02 2021 Miro Hrončok <mhroncok@redhat.com> - 0-37 |
||||||
|
- Remove support for Python 3.7 from %%pyproject_buildrequires |
||||||
|
- Generate python3dist(toml) BR with pyproject.toml earlier to avoid extra install round |
||||||
|
- Generate python3dist(setutpools/wheel) BR without pyproject.toml earlier as well |
||||||
|
|
||||||
|
* Wed Jan 27 2021 Fedora Release Engineering <releng@fedoraproject.org> - 0-36 |
||||||
|
- Rebuilt for https://fedoraproject.org/wiki/Fedora_34_Mass_Rebuild |
||||||
|
|
||||||
|
* Fri Jan 15 2021 Miro Hrončok <mhroncok@redhat.com> - 0-35 |
||||||
|
- Update the description of the package to match the new README content |
||||||
|
|
||||||
|
* Fri Dec 04 2020 Miro Hrončok <miro@hroncok.cz> - 0-34 |
||||||
|
- List all files in %%pyproject_files explicitly to avoid duplicate %%lang entries |
||||||
|
- If you amend the installed files after %%pyproject_install, %%pyproject_files might break |
||||||
|
|
||||||
|
* Fri Nov 27 2020 Miro Hrončok <mhroncok@redhat.com> - 0-33 |
||||||
|
- Pass PYTHONDONTWRITEBYTECODE=1 to %%tox to avoid packaged PYTEST bytecode |
||||||
|
|
||||||
|
* Tue Nov 03 2020 Miro Hrončok <mhroncok@redhat.com> - 0-32 |
||||||
|
- Allow multiple -e in %%pyproject_buildrequires |
||||||
|
- Fixes: rhbz#1886509 |
||||||
|
|
||||||
|
* Mon Oct 05 2020 Miro Hrončok <mhroncok@redhat.com> - 0-31 |
||||||
|
- Support PEP 517 list based backend-path |
||||||
|
|
||||||
|
* Tue Sep 29 2020 Lumír Balhar <lbalhar@redhat.com> - 0-30 |
||||||
|
- Process RECORD files in %%pyproject_install and remove them |
||||||
|
- Support the extras configuration option of tox in %%pyproject_buildrequires -t |
||||||
|
- Support multiple -x options for %%pyproject_buildrequires |
||||||
|
- Fixes: rhbz#1877977 |
||||||
|
- Fixes: rhbz#1877978 |
||||||
|
|
||||||
|
* Wed Sep 23 2020 Miro Hrončok <mhroncok@redhat.com> - 0-29 |
||||||
|
- Check the requirements after installing "requires_for_build_wheel" |
||||||
|
- If not checked, installing runtime requirements might fail |
||||||
|
|
||||||
|
* Tue Sep 08 2020 Gordon Messmer <gordon.messmer@gmail.com> - 0-28 |
||||||
|
- Support more Python version specifiers in generated BuildRequires |
||||||
|
- This adds support for the '~=' operator and wildcards |
||||||
|
|
||||||
|
* Fri Sep 04 2020 Miro Hrončok <miro@hroncok.cz> - 0-27 |
||||||
|
- Make code in $PWD importable from %%pyproject_buildrequires |
||||||
|
- Only require toml for projects with pyproject.toml |
||||||
|
- Remove a no longer useful warning for unrecognized files in %%pyproject_save_files |
||||||
|
|
||||||
|
* Mon Aug 24 2020 Tomas Hrnciar <thrnciar@redhat.com> - 0-26 |
||||||
|
- Implement automatic detection of %%lang files in %%pyproject_save_files |
||||||
|
and mark them with %%lang in filelist |
||||||
|
|
||||||
|
* Fri Aug 14 2020 Miro Hrončok <mhroncok@redhat.com> - 0-25 |
||||||
|
- Handle Python Extras in %%pyproject_buildrequires on Fedora 33+ |
||||||
|
|
||||||
|
* Tue Aug 11 2020 Miro Hrončok <mhroncok@redhat.com> - 0-24 |
||||||
|
- Allow multiple, comma-separated extras in %%pyproject_buildrequires -x |
||||||
|
|
||||||
|
* Mon Aug 10 2020 Lumír Balhar <lbalhar@redhat.com> - 0-23 |
||||||
|
- Make macros more universal for alternative Python stacks |
||||||
|
|
||||||
|
* Thu Aug 06 2020 Tomas Hrnciar <thrnciar@redhat.com> - 0-22 |
||||||
|
- Change %%pyproject_save_files +bindir argument to +auto |
||||||
|
to list all unclassified files in filelist |
||||||
|
|
||||||
|
* Tue Aug 04 2020 Miro Hrončok <mhroncok@redhat.com> - 0-21 |
||||||
|
- Actually implement %%pyproject_extras_subpkg |
||||||
|
|
||||||
|
* Wed Jul 29 2020 Miro Hrončok <mhroncok@redhat.com> - 0-20 |
||||||
|
- Implement %%pyproject_extras_subpkg |
||||||
|
|
||||||
|
* Tue Jul 28 2020 Fedora Release Engineering <releng@fedoraproject.org> - 0-19 |
||||||
|
- Rebuilt for https://fedoraproject.org/wiki/Fedora_33_Mass_Rebuild |
||||||
|
|
||||||
|
* Thu Jul 16 2020 Miro Hrončok <mhroncok@redhat.com> - 0-18 |
||||||
|
- %%pyproject_buildrequires -x (extras requires for tests) now implies -r |
||||||
|
(runtime requires) instead of erroring without it for better UX. |
||||||
|
|
||||||
|
* Wed Jul 15 2020 Miro Hrončok <mhroncok@redhat.com> - 0-17 |
||||||
|
- Set HOSTNAME to prevent tox 3.17+ from a DNS query |
||||||
|
- Fixes rhbz#1856356 |
||||||
|
|
||||||
|
* Fri Jun 19 2020 Miro Hrončok <mhroncok@redhat.com> - 0-16 |
||||||
|
- Switch from upstream deprecated pytoml to toml |
||||||
|
|
||||||
|
* Thu May 07 2020 Tomas Hrnciar <thrnciar@redhat.com> - 0-15 |
||||||
|
- Adapt %%pyproject_install not to create a PEP 610 direct_url.json file |
||||||
|
|
||||||
|
* Wed Apr 15 2020 Patrik Kopkan <pkopkan@redhat.com> - 0-14 |
||||||
|
- Add %%pyproject_save_file macro for generating file section |
||||||
|
- Handle extracting debuginfo from extension modules (#1806625) |
||||||
|
|
||||||
|
* Mon Mar 02 2020 Miro Hrončok <mhroncok@redhat.com> - 0-13 |
||||||
|
- Tox dependency generator: Handle deps read in from a text file (#1808601) |
||||||
|
|
||||||
|
* Wed Feb 05 2020 Miro Hrončok <mhroncok@redhat.com> - 0-12 |
||||||
|
- Fallback to setuptools.build_meta:__legacy__ backend instead of setuptools.build_meta |
||||||
|
- Properly handle backends with colon |
||||||
|
- Preserve existing flags in shebangs of Python files in /usr/bin |
||||||
|
|
||||||
|
* Thu Jan 30 2020 Fedora Release Engineering <releng@fedoraproject.org> - 0-11 |
||||||
|
- Rebuilt for https://fedoraproject.org/wiki/Fedora_32_Mass_Rebuild |
||||||
|
|
||||||
|
* Fri Nov 15 2019 Patrik Kopkan <pkopkan@redhat.com> - 0-10 |
||||||
|
- Install wheel in '$PWD/pyproject-macros-wheeldir' to have more explicit path from which we install. |
||||||
|
- The path can be changed by redefining %%_pyproject_wheeldir. |
||||||
|
|
||||||
|
* Wed Nov 13 2019 Anna Khaitovich <akhaitov@redhat.com> - 0-9 |
||||||
|
- Remove stray __pycache__ directory from /usr/bin when running %%pyproject_install |
||||||
|
|
||||||
|
* Fri Oct 25 2019 Miro Hrončok <mhroncok@redhat.com> - 0-8 |
||||||
|
- When tox fails, print tox output before failing |
||||||
|
|
||||||
|
* Tue Oct 08 2019 Miro Hrončok <mhroncok@redhat.com> - 0-7 |
||||||
|
- Move a verbose line of %%pyproject_buildrequires from stdout to stderr |
||||||
|
|
||||||
|
* Fri Jul 26 2019 Petr Viktorin <pviktori@redhat.com> - 0-6 |
||||||
|
- Use importlib_metadata rather than pip freeze |
||||||
|
|
||||||
|
* Fri Jul 26 2019 Miro Hrončok <mhroncok@redhat.com> - 0-5 |
||||||
|
- Allow to fetch test dependencies from tox |
||||||
|
- Add %%tox macro to invoke tests |
||||||
|
|
||||||
|
* Fri Jul 26 2019 Fedora Release Engineering <releng@fedoraproject.org> - 0-4 |
||||||
|
- Rebuilt for https://fedoraproject.org/wiki/Fedora_31_Mass_Rebuild |
||||||
|
|
||||||
|
* Tue Jul 02 2019 Miro Hrončok <mhroncok@redhat.com> - 0-3 |
||||||
|
- Add %%pyproject_buildrequires |
||||||
|
|
||||||
|
* Tue Jul 02 2019 Miro Hrončok <mhroncok@redhat.com> - 0-2 |
||||||
|
- Fix shell syntax errors in %%pyproject_install |
||||||
|
- Drop PATH warning in %%pyproject_install |
||||||
|
|
||||||
|
* Fri Jun 28 2019 Patrik Kopkan <pkopkan@redhat.com> - 0-1 |
||||||
|
- created package |
@ -0,0 +1,247 @@ |
|||||||
|
Name: python-rpm-generators |
||||||
|
Summary: Dependency generators for Python RPMs |
||||||
|
Version: 13 |
||||||
|
Release: 2%{?dist} |
||||||
|
|
||||||
|
# Originally all those files were part of RPM, so license is kept here |
||||||
|
License: GPLv2+ |
||||||
|
Url: https://src.fedoraproject.org/python-rpm-generators |
||||||
|
# Commit is the last change in following files |
||||||
|
Source0: https://raw.githubusercontent.com/rpm-software-management/rpm/102eab50b3d0d6546dfe082eac0ade21e6b3dbf1/COPYING |
||||||
|
Source1: python.attr |
||||||
|
Source2: pythondist.attr |
||||||
|
Source3: pythonname.attr |
||||||
|
Source4: pythondistdeps.py |
||||||
|
Source5: pythonbundles.py |
||||||
|
|
||||||
|
BuildArch: noarch |
||||||
|
|
||||||
|
%description |
||||||
|
%{summary}. |
||||||
|
|
||||||
|
%package -n python3-rpm-generators |
||||||
|
Summary: %{summary} |
||||||
|
#Requires: python3-packaging |
||||||
|
# We have parametric macro generators, we need RPM 4.16 (4.15.90+ is 4.16 alpha) |
||||||
|
Requires: rpm > 4.11.3-0 |
||||||
|
# This contains the Lua functions we use: |
||||||
|
Requires: python-srpm-macros >= 3 |
||||||
|
|
||||||
|
%description -n python3-rpm-generators |
||||||
|
%{summary}. |
||||||
|
|
||||||
|
%prep |
||||||
|
%autosetup -c -T |
||||||
|
cp -a %{sources} . |
||||||
|
|
||||||
|
%install |
||||||
|
mkdir -p %{buildroot}%{_fileattrsdir} |
||||||
|
install -Dpm0644 -t %{buildroot}%{_fileattrsdir} *.attr |
||||||
|
mkdir -p %{buildroot}%{_rpmconfigdir} |
||||||
|
install -Dpm0755 -t %{buildroot}%{_rpmconfigdir} *.py |
||||||
|
|
||||||
|
%files -n python3-rpm-generators |
||||||
|
%license COPYING |
||||||
|
%{_fileattrsdir}/python.attr |
||||||
|
%{_fileattrsdir}/pythondist.attr |
||||||
|
%{_fileattrsdir}/pythonname.attr |
||||||
|
%{_rpmconfigdir}/pythondistdeps.py |
||||||
|
%{_rpmconfigdir}/pythonbundles.py |
||||||
|
|
||||||
|
%changelog |
||||||
|
* Fri Jul 22 2022 Fedora Release Engineering <releng@fedoraproject.org> - 13-2 |
||||||
|
- Rebuilt for https://fedoraproject.org/wiki/Fedora_37_Mass_Rebuild |
||||||
|
|
||||||
|
* Thu Jun 02 2022 Miro Hrončok <mhroncok@redhat.com> - 13-1 |
||||||
|
- https://fedoraproject.org/wiki/Changes/PythonDistPEP503ProvidesOnly |
||||||
|
|
||||||
|
* Fri May 27 2022 Miro Hrončok <mhroncok@redhat.com> - 12-15 |
||||||
|
- Don't include all requirements with True-evaluating markers in extras subpackages |
||||||
|
- Fixes: rhbz#2090186 |
||||||
|
|
||||||
|
* Thu Feb 10 2022 Sandro Mani <manisandro@gmail.com> - 12-14 |
||||||
|
- Add namespace option to pythondistdeps.py to allow mingw-python generators |
||||||
|
|
||||||
|
* Wed Jan 26 2022 Tomas Orsava <torsava@redhat.com> - 12-13 |
||||||
|
- From `python3-foo` packages automatically generate `python3.X-foo` Obsoletes |
||||||
|
tags on CentOS/RHEL |
||||||
|
|
||||||
|
* Fri Jan 21 2022 Fedora Release Engineering <releng@fedoraproject.org> - 12-12 |
||||||
|
- Rebuilt for https://fedoraproject.org/wiki/Fedora_36_Mass_Rebuild |
||||||
|
|
||||||
|
* Sun Dec 19 2021 Gordon Messmer <gordon.messmer@gmail.com> - 12-11 |
||||||
|
- Handle legacy version specifiers that would previously raise exceptions. |
||||||
|
|
||||||
|
* Fri Oct 29 2021 Gordon Messmer <gordon.messmer@gmail.com> - 12-10 |
||||||
|
- Additional fix for dev releases. |
||||||
|
|
||||||
|
* Thu Oct 28 2021 Gordon Messmer <gordon.messmer@gmail.com> - 12-9 |
||||||
|
- Sync dependency conversion with upstream pyreq2rpm. |
||||||
|
- Improve handling of > and < operators, and != operator with prefix matching |
||||||
|
|
||||||
|
* Fri Jul 23 2021 Fedora Release Engineering <releng@fedoraproject.org> - 12-8 |
||||||
|
- Rebuilt for https://fedoraproject.org/wiki/Fedora_35_Mass_Rebuild |
||||||
|
|
||||||
|
* Tue May 25 2021 Tomas Orsava <torsava@redhat.com> - 12-7 |
||||||
|
- pythondistdeps.py: Detect missing or corrupted metadata |
||||||
|
- pythondistdeps.py: Catch all exceptions and terminate the build if one is raised |
||||||
|
|
||||||
|
* Mon Apr 19 2021 Miro Hrončok <mhroncok@redhat.com> - 12-6 |
||||||
|
- Get rid of distutils deprecation warning (by not using it) |
||||||
|
- The distutils module is deprecated in Python 3.10+ |
||||||
|
- https://www.python.org/dev/peps/pep-0632/ |
||||||
|
|
||||||
|
* Wed Mar 31 2021 Miro Hrončok <mhroncok@redhat.com> - 12-5 |
||||||
|
- Do not generate setuptools requirement for console_scripts on Python 3.10+ |
||||||
|
- See https://fedoraproject.org/wiki/Changes/Reduce_dependencies_on_python3-setuptools |
||||||
|
|
||||||
|
* Thu Mar 11 2021 Tomas Orsava <torsava@redhat.com> - 12-4 |
||||||
|
- scripts/pythondistdeps: Treat extras names case-insensitively and always |
||||||
|
output them in lower case (#1936875) |
||||||
|
|
||||||
|
* Mon Feb 22 2021 Tomas Orsava <torsava@redhat.com> - 12-3 |
||||||
|
- scripts/pythondistdeps: Fix for Python 3.10 |
||||||
|
|
||||||
|
* Wed Feb 17 2021 Tomas Orsava <torsava@redhat.com> - 12-2 |
||||||
|
- scripts/pythondistdeps: Switch from using pkg_resources to importlib.metadata |
||||||
|
for reading the egg/dist-info metadata |
||||||
|
- The script no longer requires setuptools but instead requires packaging |
||||||
|
|
||||||
|
* Wed Feb 03 2021 Miro Hrončok <mhroncok@redhat.com> - 12-1 |
||||||
|
- Disable the dist generators for Python 2 |
||||||
|
- https://fedoraproject.org/wiki/Changes/Disable_Python_2_Dist_RPM_Generators_and_Freeze_Python_2_Macros |
||||||
|
|
||||||
|
* Wed Jan 27 2021 Fedora Release Engineering <releng@fedoraproject.org> - 11-13 |
||||||
|
- Rebuilt for https://fedoraproject.org/wiki/Fedora_34_Mass_Rebuild |
||||||
|
|
||||||
|
* Mon Oct 19 2020 Tomas Orsava <torsava@redhat.com> - 11-12 |
||||||
|
- Run scripts in an isolated Python environment (#1889080) |
||||||
|
|
||||||
|
* Wed Jul 29 2020 Fedora Release Engineering <releng@fedoraproject.org> - 11-11 |
||||||
|
- Rebuilt for https://fedoraproject.org/wiki/Fedora_33_Mass_Rebuild |
||||||
|
|
||||||
|
* Tue Jul 21 2020 Miro Hrončok <mhroncok@redhat.com> - 11-10 |
||||||
|
- pythondistdeps: Split Python Extras names after the rightmost plus sign |
||||||
|
- pythondistdeps: Handle edge cases of version comparisons more closely to |
||||||
|
upstream, despite irrationality |
||||||
|
See: https://github.com/pypa/packaging/issues/320 |
||||||
|
|
||||||
|
* Fri Jul 10 2020 Tomas Orsava <torsava@redhat.com> - 11-9 |
||||||
|
- pythondistdeps: Implement provides/requires for extras packages |
||||||
|
- Enable --require-extras-subpackages |
||||||
|
- Adapt Python version marker workaround for setuptools 42+ |
||||||
|
|
||||||
|
* Fri Jun 26 2020 Miro Hrončok <mhroncok@redhat.com> - 11-8 |
||||||
|
- Fix python(abi) requires generator, it picked files from almost good directories |
||||||
|
- Add a script to generate Python bundled provides |
||||||
|
|
||||||
|
* Thu May 21 2020 Miro Hrončok <mhroncok@redhat.com> - 11-7 |
||||||
|
- Use PEP 503 names for requires |
||||||
|
|
||||||
|
* Tue May 05 2020 Miro Hrončok <mhroncok@redhat.com> - 11-6 |
||||||
|
- Deduplicate automatically provided names through Python RPM Lua macros |
||||||
|
|
||||||
|
* Wed Apr 29 2020 Tomas Orsava <torsava@redhat.com> - 11-5 |
||||||
|
- Backporting proposed upstream changes |
||||||
|
https://github.com/rpm-software-management/rpm/pull/1195 |
||||||
|
- Only provide python3dist(..) for the main Python versions (BZ#1812083) |
||||||
|
- Preparation for the proper handling of normalized names (BZ#1791530) |
||||||
|
- Add a test suite (and enable it in Fedora CI) |
||||||
|
- Better error messages for unsupported package versions |
||||||
|
- Fix sorting of dev versions |
||||||
|
|
||||||
|
* Tue Apr 28 2020 Miro Hrončok <mhroncok@redhat.com> - 11-4 |
||||||
|
- Don't define global Lua variables from Python generator |
||||||
|
|
||||||
|
* Mon Apr 20 2020 Gordon Messmer <gordon.messmer@gmail.com> - 11-3 |
||||||
|
- Handle all-zero versions without crashing |
||||||
|
|
||||||
|
* Tue Apr 07 2020 Miro Hrončok <mhroncok@redhat.com> - 11-2 |
||||||
|
- Use dynamic %%_prefix value when matching files for python(abi) provides |
||||||
|
- Sync with upstream RPM dist generator |
||||||
|
|
||||||
|
* Wed Apr 01 2020 Miro Hrončok <mhroncok@redhat.com> - 11-1 |
||||||
|
- Rewrite python(abi) generators to Lua to make them faster |
||||||
|
- RPM 4.16+ is needed |
||||||
|
- Automatically call %%python_provide |
||||||
|
|
||||||
|
* Thu Jan 30 2020 Fedora Release Engineering <releng@fedoraproject.org> - 10-4 |
||||||
|
- Rebuilt for https://fedoraproject.org/wiki/Fedora_32_Mass_Rebuild |
||||||
|
|
||||||
|
* Fri Jan 17 2020 Miro Hrončok <mhroncok@redhat.com> - 10-3 |
||||||
|
- Also provide pythonXdist() with PEP 503 normalized names (#1791530) |
||||||
|
|
||||||
|
* Fri Jan 03 2020 Miro Hrončok <mhroncok@redhat.com> - 10-2 |
||||||
|
- Fix more complicated requirement expressions by adding parenthesis |
||||||
|
|
||||||
|
* Wed Jan 01 2020 Miro Hrončok <mhroncok@redhat.com> - 10-1 |
||||||
|
- Handle version ending with ".*" (#1758141) |
||||||
|
- Handle compatible-release operator "~=" (#1758141) |
||||||
|
- Use rich deps for semantically versioned dependencies |
||||||
|
- Match Python version if minor has multiple digits (e.g. 3.10, #1777382) |
||||||
|
- Only add setuptools requirement for egg-info packages |
||||||
|
|
||||||
|
* Fri Jul 26 2019 Fedora Release Engineering <releng@fedoraproject.org> - 9-2 |
||||||
|
- Rebuilt for https://fedoraproject.org/wiki/Fedora_31_Mass_Rebuild |
||||||
|
|
||||||
|
* Mon Jun 24 2019 Tomas Orsava <torsava@redhat.com> - 9-1 |
||||||
|
- Canonicalize Python versions and properly handle != spec |
||||||
|
|
||||||
|
* Wed Apr 17 2019 Miro Hrončok <mhroncok@redhat.com> - 8-1 |
||||||
|
- console_scripts entry points to require setuptools |
||||||
|
https://github.com/rpm-software-management/rpm/pull/666 |
||||||
|
|
||||||
|
* Sat Feb 02 2019 Fedora Release Engineering <releng@fedoraproject.org> - 7-2 |
||||||
|
- Rebuilt for https://fedoraproject.org/wiki/Fedora_30_Mass_Rebuild |
||||||
|
|
||||||
|
* Thu Dec 20 2018 Igor Gnatenko <ignatenkobrain@fedoraproject.org> - 7-1 |
||||||
|
- Enable requires generator |
||||||
|
|
||||||
|
* Wed Oct 03 2018 Igor Gnatenko <ignatenkobrain@fedoraproject.org> - 6-1 |
||||||
|
- Tighten regex for depgen |
||||||
|
|
||||||
|
* Sat Jul 28 2018 Miro Hrončok <mhroncok@redhat.com> - 5-4 |
||||||
|
- Use nonstandardlib for purelib definition (#1609492) |
||||||
|
|
||||||
|
* Sat Jul 28 2018 Igor Gnatenko <ignatenkobrain@fedoraproject.org> - 5-3 |
||||||
|
- Add pythondist generator |
||||||
|
|
||||||
|
* Sat Jul 14 2018 Fedora Release Engineering <releng@fedoraproject.org> - 5-2 |
||||||
|
- Rebuilt for https://fedoraproject.org/wiki/Fedora_29_Mass_Rebuild |
||||||
|
|
||||||
|
* Sun Feb 11 2018 Igor Gnatenko <ignatenkobrain@fedoraproject.org> - 5-1 |
||||||
|
- Fork upstream generators |
||||||
|
- "Fix" support of environment markers |
||||||
|
|
||||||
|
* Fri Feb 09 2018 Fedora Release Engineering <releng@fedoraproject.org> - 4.14.0-2.1 |
||||||
|
- Rebuilt for https://fedoraproject.org/wiki/Fedora_28_Mass_Rebuild |
||||||
|
|
||||||
|
* Tue Nov 28 2017 Tomas Orsava <torsava@redhat.com> - 4.14.0-2 |
||||||
|
- Switch bootstrapping macro to a bcond for modularity |
||||||
|
|
||||||
|
* Fri Oct 20 2017 Tomas Orsava <torsava@redhat.com> - 4.14.0-1 |
||||||
|
- Rebase to rpm 4.14.0 final (http://rpm.org/wiki/Releases/4.14.0) |
||||||
|
- Re-synchronize version/release macros with the rpm Fedora package |
||||||
|
|
||||||
|
* Mon Sep 18 2017 Tomas Orsava <torsava@redhat.com> - 4.14.0-0.rc1.1 |
||||||
|
- Update to a new upstream version of RPM |
||||||
|
- Drop upstreamed patches |
||||||
|
- Renumber remaining patches |
||||||
|
|
||||||
|
* Thu Aug 24 2017 Miro Hrončok <mhroncok@redhat.com> - 4.13.0.1-4 |
||||||
|
- Add patch 10: Do not provide pythonXdist for platform-python packages (rhbz#1484607) |
||||||
|
|
||||||
|
* Tue Aug 08 2017 Tomas Orsava <torsava@redhat.com> - 4.13.0.1-3 |
||||||
|
- Add patch 9: Generate requires and provides for platform-python(abi) |
||||||
|
(https://fedoraproject.org/wiki/Changes/Platform_Python_Stack) |
||||||
|
|
||||||
|
* Thu Jul 27 2017 Fedora Release Engineering <releng@fedoraproject.org> - 4.13.0.1-2.1 |
||||||
|
- Rebuilt for https://fedoraproject.org/wiki/Fedora_27_Mass_Rebuild |
||||||
|
|
||||||
|
* Thu May 18 2017 Tomas Orsava <torsava@redhat.com> - 4.13.0.1-2 |
||||||
|
- Added a license file |
||||||
|
- Added a dependency on rpm for the proper directory structure |
||||||
|
- Properly owning the __pycache__ directory |
||||||
|
|
||||||
|
* Tue May 02 2017 Tomas Orsava <torsava@redhat.com> - 4.13.0.1-1 |
||||||
|
- Splitting Python RPM generators from the `rpm` package to standalone one |
Loading…
Reference in new issue