diff --git a/SOURCES/00055-systemtap.patch b/SOURCES/00055-systemtap.patch new file mode 100644 index 00000000..0ab03874 --- /dev/null +++ b/SOURCES/00055-systemtap.patch @@ -0,0 +1,822 @@ +diff -up Python-3.3.0rc2/configure.ac.systemtap Python-3.3.0rc2/configure.ac +--- Python-3.3.0rc2/configure.ac.systemtap 2012-09-09 05:11:14.000000000 -0400 ++++ Python-3.3.0rc2/configure.ac 2012-09-10 09:17:21.114511781 -0400 +@@ -2678,6 +2678,23 @@ if test "$with_valgrind" != no; then + OPT="-DDYNAMIC_ANNOTATIONS_ENABLED=1 $OPT" + fi + ++# Check for systemtap support ++# On Linux, /usr/bin/dtrace is in fact a shim to SystemTap ++AC_MSG_CHECKING([for --with-systemtap]) ++AC_ARG_WITH([systemtap], ++ AC_HELP_STRING([--with(out)-systemtap], [disable/enable SystemTap support]),, ++ with_systemtap=no) ++AC_MSG_RESULT([$with_systemtap]) ++if test "$with_systemtap" != no; then ++ AC_DEFINE(WITH_SYSTEMTAP, 1, ++ [Define if you want to compile in SystemTap support]) ++ SYSTEMTAPOBJS="Python/pysystemtap.o" ++ SYSTEMTAPDEPS="\$(srcdir)/Python/pysystemtap.h" ++fi ++ ++AC_SUBST(SYSTEMTAPOBJS) ++AC_SUBST(SYSTEMTAPDEPS) ++ + # -I${DLINCLDIR} is added to the compile rule for importdl.o + AC_SUBST(DLINCLDIR) + DLINCLDIR=. +diff -up Python-3.3.0rc2/configure.systemtap Python-3.3.0rc2/configure +--- Python-3.3.0rc2/configure.systemtap 2012-09-09 05:11:14.000000000 -0400 ++++ Python-3.3.0rc2/configure 2012-09-10 09:17:21.116511780 -0400 +@@ -618,6 +618,8 @@ TRUE + MACHDEP_OBJS + DYNLOADFILE + DLINCLDIR ++SYSTEMTAPDEPS ++SYSTEMTAPOBJS + THREADOBJ + LDLAST + USE_THREAD_MODULE +@@ -779,6 +781,7 @@ with_doc_strings + with_tsc + with_pymalloc + with_valgrind ++with_systemtap + with_fpectl + with_libm + with_libc +@@ -1456,6 +1459,7 @@ Optional Packages: + --with(out)-tsc enable/disable timestamp counter profile + --with(out)-pymalloc disable/enable specialized mallocs + --with-valgrind Enable Valgrind support ++ --with(out)-systemtap disable/enable SystemTap support + --with-fpectl enable SIGFPE catching + --with-libm=STRING math library + --with-libc=STRING C library +@@ -10065,6 +10069,31 @@ fi + OPT="-DDYNAMIC_ANNOTATIONS_ENABLED=1 $OPT" + fi + ++# Check for systemtap support ++# On Linux, /usr/bin/dtrace is in fact a shim to SystemTap ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-systemtap" >&5 ++$as_echo_n "checking for --with-systemtap... " >&6; } ++ ++# Check whether --with-systemtap was given. ++if test "${with_systemtap+set}" = set; then : ++ withval=$with_systemtap; ++else ++ with_systemtap=no ++fi ++ ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_systemtap" >&5 ++$as_echo "$with_systemtap" >&6; } ++if test "$with_systemtap" != no; then ++ ++$as_echo "#define WITH_SYSTEMTAP 1" >>confdefs.h ++ ++ SYSTEMTAPOBJS="Python/pysystemtap.o" ++ SYSTEMTAPDEPS="\$(srcdir)/Python/pysystemtap.h" ++fi ++ ++ ++ ++ + # -I${DLINCLDIR} is added to the compile rule for importdl.o + + DLINCLDIR=. 
+diff -up Python-3.3.0rc2/Doc/howto/index.rst.systemtap Python-3.3.0rc2/Doc/howto/index.rst +--- Python-3.3.0rc2/Doc/howto/index.rst.systemtap 2012-09-09 05:10:51.000000000 -0400 ++++ Python-3.3.0rc2/Doc/howto/index.rst 2012-09-10 09:17:21.117511779 -0400 +@@ -29,4 +29,5 @@ Currently, the HOWTOs are: + argparse.rst + ipaddress.rst + clinic.rst ++ instrumentation.rst + +diff -up Python-3.3.0rc2/Doc/howto/instrumentation.rst.systemtap Python-3.3.0rc2/Doc/howto/instrumentation.rst +--- Python-3.3.0rc2/Doc/howto/instrumentation.rst.systemtap 2012-09-10 09:17:21.117511779 -0400 ++++ Python-3.3.0rc2/Doc/howto/instrumentation.rst 2012-09-10 09:17:21.117511779 -0400 +@@ -0,0 +1,295 @@ ++.. _instrumentation: ++ ++==================================== ++Instrumenting CPython with SystemTap ++==================================== ++ ++:author: David Malcolm ++ ++DTrace and SystemTap are monitoring tools, each providing a way to inspect ++what the processes on a computer system are doing. They both use ++domain-specific languages allowing a user to write scripts which: ++ ++ - filter which processes are to be observed ++ - gather data from the processes of interest ++ - generate reports on the data ++ ++As of Python 3.3, CPython can be built with embedded "markers" that can be ++observed by a SystemTap script, making it easier to monitor what the CPython ++processes on a system are doing. ++ ++.. Potentially this document could be expanded to also cover DTrace markers. ++ However, I'm not a DTrace expert. ++ ++.. I'm using ".. code-block:: c" for SystemTap scripts, as "c" is syntactically ++ the closest match that Sphinx supports ++ ++ ++Enabling the static markers ++--------------------------- ++ ++In order to build CPython with the embedded markers for SystemTap, the ++SystemTap development tools must be installed. ++ ++On a Fedora or Red Hat Enterprise Linux machine, this can be done via:: ++ ++ yum install systemtap-sdt-devel ++ ++CPython must then be configured `--with-systemtap`:: ++ ++ checking for --with-systemtap... yes ++ ++You can verify if the SystemTap static markers are present in the built ++binary by seeing if it contains a ".note.stapsdt" section. ++ ++.. code-block:: bash ++ ++ $ eu-readelf -S ./python | grep .note.stapsdt ++ [29] .note.stapsdt NOTE 0000000000000000 00308d78 000000b8 0 0 0 4 ++ ++If you've built python as a shared library (with --enable-shared), you need ++to look instead within the shared library. For example: ++ ++.. code-block:: bash ++ ++ $ eu-readelf -S libpython3.3dm.so.1.0 | grep .note.stapsdt ++ [28] .note.stapsdt NOTE 0000000000000000 00365b68 000000b8 0 0 0 4 ++ ++Earlier versions of SystemTap stored the markers in a ".probes" section. ++ ++For the curious, you can see the metadata for the static markers using this ++invocation. ++ ++.. code-block:: bash ++ ++ $ eu-readelf -x .note.stapsdt ./python ++ ++ Hex dump of section [29] '.note.stapsdt', 184 bytes at offset 0x308d78: ++ 0x00000000 08000000 45000000 03000000 73746170 ....E.......stap ++ 0x00000010 73647400 d4664b00 00000000 4fc36600 sdt..fK.....O.f. ++ 0x00000020 00000000 488d9000 00000000 70797468 ....H.......pyth ++ 0x00000030 6f6e0066 756e6374 696f6e5f 5f656e74 on.function__ent ++ 0x00000040 72790038 40257261 78203840 25726478 ry.8@%rax 8@%rdx ++ 0x00000050 202d3440 25656378 00000000 08000000 -4@%ecx........ ++ 0x00000060 46000000 03000000 73746170 73647400 F.......stapsdt. ++ 0x00000070 0d674b00 00000000 4fc36600 00000000 .gK.....O.f..... 
++ 0x00000080 4a8d9000 00000000 70797468 6f6e0066 J.......python.f ++ 0x00000090 756e6374 696f6e5f 5f726574 75726e00 unction__return. ++ 0x000000a0 38402572 61782038 40257264 78202d34 8@%rax 8@%rdx -4 ++ 0x000000b0 40256563 78000000 @%ecx... ++ ++and a sufficiently modern eu-readelf can print the metadata: ++ ++.. code-block:: bash ++ ++ $ eu-readelf -n ./python ++ ++ Note section [ 1] '.note.gnu.build-id' of 36 bytes at offset 0x190: ++ Owner Data size Type ++ GNU 20 GNU_BUILD_ID ++ Build ID: a28f8db1b224530b0d38ad7b82a249cf7c3f18d6 ++ ++ Note section [27] '.note.stapsdt' of 184 bytes at offset 0x1ae884: ++ Owner Data size Type ++ stapsdt 70 Version: 3 ++ PC: 0xe0d3a, Base: 0x14b150, Semaphore: 0x3ae882 ++ Provider: python, Name: function__return, Args: '8@%rbx 8@%r13 -4@%eax' ++ stapsdt 69 Version: 3 ++ PC: 0xe0f37, Base: 0x14b150, Semaphore: 0x3ae880 ++ Provider: python, Name: function__entry, Args: '8@%rbx 8@%r13 -4@%eax' ++ ++The above metadata contains information for SystemTap describing how it can ++patch strategically-placed machine code instructions to enable the tracing ++hooks used by a SystemTap script. ++ ++ ++Static markers ++-------------- ++ ++The low-level way to use the SystemTap integration is to use the static ++markers directly. This requires you to explicitly state the binary file ++containing them. ++ ++For example, this script can be used to show the call/return hierarchy of a ++Python script: ++ ++.. code-block:: c ++ ++ probe process('python').mark("function__entry") { ++ filename = user_string($arg1); ++ funcname = user_string($arg2); ++ lineno = $arg3; ++ ++ printf("%s => %s in %s:%d\\n", ++ thread_indent(1), funcname, filename, lineno); ++ } ++ ++ probe process('python').mark("function__return") { ++ filename = user_string($arg1); ++ funcname = user_string($arg2); ++ lineno = $arg3; ++ ++ printf("%s <= %s in %s:%d\\n", ++ thread_indent(-1), funcname, filename, lineno); ++ } ++ ++It can be invoked like this: ++ ++.. code-block:: bash ++ ++ $ stap \ ++ show-call-hierarchy.stp \ ++ -c ./python test.py ++ ++The output looks like this:: ++ ++ 11408 python(8274): => __contains__ in Lib/_abcoll.py:362 ++ 11414 python(8274): => __getitem__ in Lib/os.py:425 ++ 11418 python(8274): => encode in Lib/os.py:490 ++ 11424 python(8274): <= encode in Lib/os.py:493 ++ 11428 python(8274): <= __getitem__ in Lib/os.py:426 ++ 11433 python(8274): <= __contains__ in Lib/_abcoll.py:366 ++ ++where the columns are: ++ ++ - time in microseconds since start of script ++ ++ - name of executable ++ ++ - PID of process ++ ++and the remainder indicates the call/return hierarchy as the script executes. ++ ++For a `--enable-shared` build of CPython, the markers are contained within the ++libpython shared library, and the probe's dotted path needs to reflect this. For ++example, this line from the above example:: ++ ++ probe process('python').mark("function__entry") { ++ ++should instead read:: ++ ++ probe process('python').library("libpython3.3dm.so.1.0").mark("function__entry") { ++ ++(assuming a debug build of CPython 3.3) ++ ++.. I'm reusing the "c:function" type for markers ++ ++.. c:function:: function__entry(str filename, str funcname, int lineno) ++ ++ This marker indicates that execution of a Python function has begun. It is ++ only triggered for pure-python (bytecode) functions. 
++ ++ The filename, function name, and line number are provided back to the ++ tracing script as positional arguments, which must be accessed using ++ `$arg1`, `$arg2`: ++ ++ * `$arg1` : `(const char *)` filename, accessible using `user_string($arg1)` ++ ++ * `$arg2` : `(const char *)` function name, accessible using ++ `user_string($arg2)` ++ ++ * `$arg3` : `int` line number ++ ++ * `$arg4` : `(PyFrameObject *)`, the frame being executed ++ ++.. c:function:: function__return(str filename, str funcname, int lineno) ++ ++ This marker is the converse of `function__entry`, and indicates that ++ execution of a Python function has ended (either via ``return``, or via an ++ exception). It is only triggered for pure-python (bytecode) functions. ++ ++ The arguments are the same as for `function__entry` ++ ++ ++Tapsets ++------- ++ ++The higher-level way to use the SystemTap integration is to use a "tapset": ++SystemTap's equivalent of a library, which hides some of the lower-level ++details of the static markers. ++ ++Here is a tapset file, based on a non-shared build of CPython: ++ ++.. code-block:: c ++ ++ /* ++ Provide a higher-level wrapping around the function__entry and ++ function__return markers: ++ */ ++ probe python.function.entry = process("python").mark("function__entry") ++ { ++ filename = user_string($arg1); ++ funcname = user_string($arg2); ++ lineno = $arg3; ++ frameptr = $arg4 ++ } ++ probe python.function.return = process("python").mark("function__return") ++ { ++ filename = user_string($arg1); ++ funcname = user_string($arg2); ++ lineno = $arg3; ++ frameptr = $arg4 ++ } ++ ++If this file is installed in SystemTap's tapset directory (e.g. ++`/usr/share/systemtap/tapset`), then these additional probepoints become ++available: ++ ++.. c:function:: python.function.entry(str filename, str funcname, int lineno, frameptr) ++ ++ This probe point indicates that execution of a Python function has begun. ++ It is only triggered for pure-python (bytecode) functions. ++ ++.. c:function:: python.function.return(str filename, str funcname, int lineno, frameptr) ++ ++ This probe point is the converse of `python.function.return`, and indicates ++ that execution of a Python function has ended (either via ``return``, or ++ via an exception). It is only triggered for pure-python (bytecode) functions. ++ ++ ++Examples ++-------- ++This SystemTap script uses the tapset above to more cleanly implement the ++example given above of tracing the Python function-call hierarchy, without ++needing to directly name the static markers: ++ ++.. code-block:: c ++ ++ probe python.function.entry ++ { ++ printf("%s => %s in %s:%d\n", ++ thread_indent(1), funcname, filename, lineno); ++ } ++ ++ probe python.function.return ++ { ++ printf("%s <= %s in %s:%d\n", ++ thread_indent(-1), funcname, filename, lineno); ++ } ++ ++ ++The following script uses the tapset above to provide a top-like view of all ++running CPython code, showing the top 20 most frequently-entered bytecode ++frames, each second, across the whole system: ++ ++.. 
code-block:: c ++ ++ global fn_calls; ++ ++ probe python.function.entry ++ { ++ fn_calls[pid(), filename, funcname, lineno] += 1; ++ } ++ ++ probe timer.ms(1000) { ++ printf("\033[2J\033[1;1H") /* clear screen */ ++ printf("%6s %80s %6s %30s %6s\n", ++ "PID", "FILENAME", "LINE", "FUNCTION", "CALLS") ++ foreach ([pid, filename, funcname, lineno] in fn_calls- limit 20) { ++ printf("%6d %80s %6d %30s %6d\n", ++ pid, filename, lineno, funcname, ++ fn_calls[pid, filename, funcname, lineno]); ++ } ++ delete fn_calls; ++ } ++ +diff -up Python-3.3.0rc2/Lib/test/test_systemtap.py.systemtap Python-3.3.0rc2/Lib/test/test_systemtap.py +--- Python-3.3.0rc2/Lib/test/test_systemtap.py.systemtap 2012-09-10 09:17:21.117511779 -0400 ++++ Python-3.3.0rc2/Lib/test/test_systemtap.py 2012-09-10 09:17:21.117511779 -0400 +@@ -0,0 +1,234 @@ ++# Verify that systemtap static probes work ++# ++import subprocess ++import sys ++import sysconfig ++import os ++import unittest ++ ++from test.support import run_unittest, TESTFN, unlink ++ ++if '--with-systemtap' not in sysconfig.get_config_var('CONFIG_ARGS'): ++ raise unittest.SkipTest("Python was not configured --with-systemtap") ++ ++try: ++ _, stap_version = subprocess.Popen(["stap", "-V"], ++ stdout=subprocess.PIPE, ++ stderr=subprocess.PIPE, ++ ).communicate() ++except OSError: ++ # This is what "no stap" looks like. There may, however, be other ++ # errors that manifest this way too. ++ raise unittest.SkipTest("Couldn't find stap on the path") ++ ++def invoke_systemtap_script(script, cmd): ++ # Start a child process, probing with the given systemtap script ++ # (passed as stdin to the "stap" tool) ++ # The script should be a bytes instance ++ # Return (stdout, stderr) pair ++ ++ p = subprocess.Popen(["stap", "-", '-vv', '-c', cmd], ++ stdin=subprocess.PIPE, ++ stdout=subprocess.PIPE, ++ stderr=subprocess.PIPE) ++ out, err = p.communicate(input=script) ++ return out, err ++ ++# Verify that stap can run a simple "hello world"-style script ++# This can fail for various reasons: ++# - missing kernel headers ++# - permissions (a non-root user needs to be in the "stapdev" group) ++TRIVIAL_STAP_SCRIPT = b'probe begin { println("hello world") exit () }' ++ ++out, err = invoke_systemtap_script(TRIVIAL_STAP_SCRIPT, 'true') ++if out != b'hello world\n': ++ raise unittest.SkipTest("Test systemtap script did not run; stderr was: %s" % err) ++ ++# We don't expect stderr to be empty, since we're invoking stap with "-vv": stap ++# will (we hope) generate debugging output on stderr. 
++ ++def invoke_python_under_systemtap(script, pythoncode=None, pythonfile=None): ++ # Start a child python process, probing with the given systemtap script ++ # (passed as stdin to the "stap" tool) ++ # The script should be a bytes instance ++ # Return (stdout, stderr) pair ++ ++ if pythonfile: ++ pythoncmd = '%s %s' % (sys.executable, pythonfile) ++ else: ++ pythoncmd = '%s -c %r' % (sys.executable, pythoncode) ++ ++ # The process tree of a stap invocation of a command goes through ++ # something like this: ++ # stap ->fork/exec(staprun; exec stapio ->f/e(-c cmd); exec staprun -r) ++ # and this trip through setuid leads to LD_LIBRARY_PATH being dropped, ++ # which would lead to an --enable-shared build of python failing to be ++ # find its libpython, with an error like: ++ # error while loading shared libraries: libpython3.3dm.so.1.0: cannot ++ # open shared object file: No such file or directory ++ # Hence we need to jump through some hoops to expose LD_LIBRARY_PATH to ++ # the invoked python process: ++ LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') ++ if LD_LIBRARY_PATH: ++ pythoncmd = 'env LD_LIBRARY_PATH=%s ' % LD_LIBRARY_PATH + pythoncmd ++ ++ return invoke_systemtap_script(script, pythoncmd) ++ ++# When using the static markers, we need to supply the prefix of a systemtap ++# dotted probe point that containing the marker. ++# See http://sourceware.org/systemtap/langref/Probe_points.html ++# ++# We need to determine if this is a shared-library build ++# ++# Note that sysconfig can get this wrong; see: ++# http://bugs.python.org/issue14774 ++# ++if '--enable-shared' in sysconfig.get_config_var('CONFIG_ARGS'): ++ # For a shared-library build, the markers are in library(INSTSONAME): ++ INSTSONAME = sysconfig.get_config_var('INSTSONAME') ++ probe_prefix = 'process("%s").library("%s")' % (sys.executable, INSTSONAME) ++else: ++ # For a non-shared-library build, we can simply use sys.executable: ++ probe_prefix = 'process("%s")' % sys.executable ++ ++# The following script ought to generate lots of lines showing recursive ++# function entry and return, of the form: ++# 11408 python(8274): => __contains__ in Lib/_abcoll.py:362 ++# 11414 python(8274): => __getitem__ in Lib/os.py:425 ++# 11418 python(8274): => encode in Lib/os.py:490 ++# 11424 python(8274): <= encode in Lib/os.py:493 ++# 11428 python(8274): <= __getitem__ in Lib/os.py:426 ++# 11433 python(8274): <= __contains__ in Lib/_abcoll.py:366 ++# where the column are: ++# - time in microseconds since start of script ++# - name of executable ++# - PID of process ++# and the remainder indicates the call/return hierarchy ++ ++hierarchy_script = (''' ++probe %s.mark("function__entry") { ++ filename = user_string($arg1); ++ funcname = user_string($arg2); ++ lineno = $arg3; ++ ++ printf("%%s => %%s in %%s:%%d\\n", thread_indent(1), funcname, filename, lineno); ++} ++ ++probe %s.mark("function__return") { ++ filename = user_string($arg1); ++ funcname = user_string($arg2); ++ lineno = $arg3; ++ ++ printf("%%s <= %%s in %%s:%%d\\n", thread_indent(-1), funcname, filename, lineno); ++} ++''' % (probe_prefix, probe_prefix)).encode('utf-8') ++ ++ ++class ErrorDumper: ++ # A context manager that dumps extra information if an exception is raised, ++ # to help track down why the problem occurred ++ def __init__(self, out, err): ++ self.out = out ++ self.err = err ++ ++ def __enter__(self): ++ pass ++ ++ def __exit__(self, type_, value, traceback): ++ if type_: ++ # an exception is being raised: ++ print('stdout: %s' % out.decode()) ++ 
print('stderr: %s' % err.decode()) ++ ++class SystemtapTests(unittest.TestCase): ++ ++ def test_invoking_python(self): ++ # Ensure that we can invoke python under stap, with a trivial stap ++ # script: ++ out, err = invoke_python_under_systemtap( ++ b'probe begin { println("hello from stap") exit () }', ++ pythoncode="print('hello from python')") ++ with ErrorDumper(out, err): ++ self.assertIn(b'hello from stap', out) ++ self.assertIn(b'hello from python', out) ++ ++ def test_function_entry(self): ++ # Ensure that the function_entry static marker works ++ out, err = invoke_python_under_systemtap(hierarchy_script) ++ # stdout ought to contain various lines showing recursive function ++ # entry and return (see above) ++ ++ # Uncomment this for debugging purposes: ++ # print(out.decode('utf-8')) ++ ++ # Executing the cmdline-supplied "pass": ++ # 0 python(8274): => in :1 ++ # 5 python(8274): <= in :1 ++ with ErrorDumper(out, err): ++ self.assertIn(b'=> in :1', out, ++ msg="stdout: %s\nstderr: %s\n" % (out, err)) ++ ++ def test_function_encoding(self): ++ # Ensure that function names containing non-Latin 1 code ++ # points are handled: ++ pythonfile = TESTFN ++ try: ++ unlink(pythonfile) ++ f = open(pythonfile, "wb") ++ f.write(""" ++# Sample script with non-ASCII filename, for use by test_systemtap.py ++# Implicitly UTF-8 ++ ++def 文字化け(): ++ '''Function with non-ASCII identifier; I believe this reads "mojibake"''' ++ print("hello world!") ++ ++文字化け() ++""".encode('utf-8')) ++ f.close() ++ ++ out, err = invoke_python_under_systemtap(hierarchy_script, ++ pythonfile=pythonfile) ++ out_utf8 = out.decode('utf-8') ++ with ErrorDumper(out, err): ++ self.assertIn('=> in %s:5' % pythonfile, out_utf8) ++ self.assertIn(' => 文字化け in %s:5' % pythonfile, out_utf8) ++ self.assertIn(' <= 文字化け in %s:7' % pythonfile, out_utf8) ++ self.assertIn('<= in %s:9' % pythonfile, out_utf8) ++ finally: ++ unlink(pythonfile) ++ ++ @unittest.skipIf(sys.getfilesystemencoding() == 'ascii', ++ 'the test filename is not encodable with ASCII') ++ def test_filename_encoding(self): ++ # Ensure that scripts names containing non-Latin 1 code ++ # points are handled: ++ pythonfile = TESTFN + '_☠.py' ++ try: ++ unlink(pythonfile) ++ f = open(pythonfile, "wb") ++ f.write(""" ++def foo(): ++ '''Function with non-ASCII identifier; I believe this reads "mojibake"''' ++ print("hello world!") ++ ++foo() ++""".encode('utf-8')) ++ f.close() ++ ++ out, err = invoke_python_under_systemtap(hierarchy_script, ++ pythonfile=pythonfile) ++ out_utf8 = out.decode('utf-8') ++ with ErrorDumper(out, err): ++ self.assertIn('=> in %s:2' % pythonfile, out_utf8) ++ self.assertIn(' => foo in %s:2' % pythonfile, out_utf8) ++ self.assertIn(' <= foo in %s:4' % pythonfile, out_utf8) ++ self.assertIn('<= in %s:6' % pythonfile, out_utf8) ++ finally: ++ unlink(pythonfile) ++ ++def test_main(): ++ run_unittest(SystemtapTests) ++ ++if __name__ == "__main__": ++ test_main() +diff -up Python-3.3.0rc2/Makefile.pre.in.systemtap Python-3.3.0rc2/Makefile.pre.in +--- Python-3.3.0rc2/Makefile.pre.in.systemtap 2012-09-09 05:11:05.000000000 -0400 ++++ Python-3.3.0rc2/Makefile.pre.in 2012-09-10 09:19:51.195501518 -0400 +@@ -363,6 +363,7 @@ PYTHON_OBJS= \ + Python/formatter_unicode.o \ + Python/fileutils.o \ + Python/$(DYNLOADFILE) \ ++ @SYSTEMTAPOBJS@ \ + $(LIBOBJS) \ + $(MACHDEP_OBJS) \ + $(THREADOBJ) +@@ -713,7 +714,8 @@ Objects/setobject.o: $(srcdir)/Objects/s + $(OPCODETARGETS_H): $(OPCODETARGETGEN_FILES) + $(OPCODETARGETGEN) $(OPCODETARGETS_H) + +-Python/ceval.o: 
$(OPCODETARGETS_H) $(srcdir)/Python/ceval_gil.h ++Python/ceval.o: $(OPCODETARGETS_H) $(srcdir)/Python/ceval_gil.h \ ++ $(srcdir)/Python/ceval_systemtap.h @SYSTEMTAPDEPS@ + + Python/frozen.o: Python/importlib.h Python/importlib_external.h + +@@ -724,6 +726,13 @@ Objects/typeobject.o: $(srcdir)/Objects/ + Objects/typeslots.inc: $(srcdir)/Include/typeslots.h $(srcdir)/Objects/typeslots.py + $(PYTHON) $(srcdir)/Objects/typeslots.py < $(srcdir)/Include/typeslots.h > Objects/typeslots.inc + ++# Only needed with --with-systemtap; not a public header: ++$(srcdir)/Python/pysystemtap.h: $(srcdir)/Python/pysystemtap.d ++ dtrace -o $@ $(DFLAGS) -C -h -s $(srcdir)/Python/pysystemtap.d ++ ++Python/pysystemtap.o: $(srcdir)/Python/pysystemtap.d Python/ceval.o ++ dtrace -o $@ $(DFLAGS) -C -G -s $(srcdir)/Python/pysystemtap.d Python/ceval.o ++ + ############################################################################ + # Header files + +@@ -1345,6 +1354,7 @@ clean: pycremoval + -rm -f Lib/lib2to3/*Grammar*.pickle + -rm -f Programs/_testembed Programs/_freeze_importlib + -rm -rf build ++ -rm -f $(srcdir)/Python/pysystemtap.h + + profile-removal: + find . -name '*.gc??' -exec rm -f {} ';' +diff -up Python-3.3.0rc2/pyconfig.h.in.systemtap Python-3.3.0rc2/pyconfig.h.in +--- Python-3.3.0rc2/pyconfig.h.in.systemtap 2012-09-09 05:11:14.000000000 -0400 ++++ Python-3.3.0rc2/pyconfig.h.in 2012-09-10 09:17:21.120511781 -0400 +@@ -1306,6 +1306,9 @@ + /* Define if you want to compile in Python-specific mallocs */ + #undef WITH_PYMALLOC + ++/* Define if you want to compile in SystemTap support */ ++#undef WITH_SYSTEMTAP ++ + /* Define if you want to compile in rudimentary thread support */ + #undef WITH_THREAD + +diff -up Python-3.3.0rc2/Python/ceval.c.systemtap Python-3.3.0rc2/Python/ceval.c +--- Python-3.3.0rc2/Python/ceval.c.systemtap 2012-09-09 05:11:12.000000000 -0400 ++++ Python-3.3.0rc2/Python/ceval.c 2012-09-10 09:17:21.122511781 -0400 +@@ -18,6 +18,8 @@ + + #include + ++#include "ceval_systemtap.h" ++ + #ifndef WITH_TSC + + #define READ_TIMESTAMP(var) +@@ -1160,6 +1162,10 @@ PyEval_EvalFrameEx(PyFrameObject *f, int + } + } + ++ if (PYTHON_FUNCTION_ENTRY_ENABLED()) { ++ systemtap_function_entry(f); ++ } ++ + co = f->f_code; + names = co->co_names; + consts = co->co_consts; +@@ -3077,6 +3083,11 @@ fast_yield: + + /* pop frame */ + exit_eval_frame: ++ ++ if (PYTHON_FUNCTION_RETURN_ENABLED()) { ++ systemtap_function_return(f); ++ } ++ + Py_LeaveRecursiveCall(); + f->f_executing = 0; + tstate->frame = f->f_back; +diff -up Python-3.3.0rc2/Python/ceval_systemtap.h.systemtap Python-3.3.0rc2/Python/ceval_systemtap.h +--- Python-3.3.0rc2/Python/ceval_systemtap.h.systemtap 2012-09-10 09:17:21.122511781 -0400 ++++ Python-3.3.0rc2/Python/ceval_systemtap.h 2012-09-10 09:17:21.122511781 -0400 +@@ -0,0 +1,86 @@ ++/* ++ Support for SystemTap static markers ++*/ ++ ++#ifdef WITH_SYSTEMTAP ++ ++#include "pysystemtap.h" ++ ++/* ++ A struct to hold all of the information gathered when one of the traceable ++ markers is triggered ++*/ ++struct frame_marker_info ++{ ++ PyObject *filename_obj; ++ PyObject *funcname_obj; ++ const char *filename; ++ const char *funcname; ++ int lineno; ++}; ++ ++static void ++get_frame_marker_info(PyFrameObject *f, struct frame_marker_info *fmi) ++{ ++ PyObject *ptype; ++ PyObject *pvalue; ++ PyObject *ptraceback; ++ ++ PyErr_Fetch(&ptype, &pvalue, &ptraceback); ++ ++ fmi->filename_obj = PyUnicode_EncodeFSDefault(f->f_code->co_filename); ++ if (fmi->filename_obj) { ++ fmi->filename = 
PyBytes_AsString(fmi->filename_obj); ++ } else { ++ fmi->filename = NULL; ++ } ++ ++ fmi->funcname_obj = PyUnicode_AsUTF8String(f->f_code->co_name); ++ if (fmi->funcname_obj) { ++ fmi->funcname = PyBytes_AsString(fmi->funcname_obj); ++ } else { ++ fmi->funcname = NULL; ++ } ++ ++ fmi->lineno = PyCode_Addr2Line(f->f_code, f->f_lasti); ++ ++ PyErr_Restore(ptype, pvalue, ptraceback); ++ ++} ++ ++static void ++release_frame_marker_info(struct frame_marker_info *fmi) ++{ ++ Py_XDECREF(fmi->filename_obj); ++ Py_XDECREF(fmi->funcname_obj); ++} ++ ++static void ++systemtap_function_entry(PyFrameObject *f) ++{ ++ struct frame_marker_info fmi; ++ get_frame_marker_info(f, &fmi); ++ PYTHON_FUNCTION_ENTRY(fmi.filename, fmi.funcname, fmi.lineno, f); ++ release_frame_marker_info(&fmi); ++} ++ ++static void ++systemtap_function_return(PyFrameObject *f) ++{ ++ struct frame_marker_info fmi; ++ get_frame_marker_info(f, &fmi); ++ PYTHON_FUNCTION_RETURN(fmi.filename, fmi.funcname, fmi.lineno, f); ++ release_frame_marker_info(&fmi); ++} ++ ++#else /* #ifdef WITH_SYSTEMTAP */ ++ ++/* ++ When configured --without-systemtap, everything compiles away to nothing: ++*/ ++#define PYTHON_FUNCTION_ENTRY_ENABLED() 0 ++#define PYTHON_FUNCTION_RETURN_ENABLED() 0 ++#define systemtap_function_entry(f) ++#define systemtap_function_return(f) ++ ++#endif +diff -up Python-3.3.0rc2/Python/pysystemtap.d.systemtap Python-3.3.0rc2/Python/pysystemtap.d +--- Python-3.3.0rc2/Python/pysystemtap.d.systemtap 2012-09-10 09:17:21.122511781 -0400 ++++ Python-3.3.0rc2/Python/pysystemtap.d 2012-09-10 09:17:21.122511781 -0400 +@@ -0,0 +1,4 @@ ++provider python { ++ probe function__entry(const char *, const char *, int, PyFrameObject *); ++ probe function__return(const char *, const char *, int, PyFrameObject *); ++}; diff --git a/SOURCES/00102-lib64.patch b/SOURCES/00102-lib64.patch new file mode 100644 index 00000000..476d2ee6 --- /dev/null +++ b/SOURCES/00102-lib64.patch @@ -0,0 +1,188 @@ +diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py +index 9474e9c..c0ce4c6 100644 +--- a/Lib/distutils/command/install.py ++++ b/Lib/distutils/command/install.py +@@ -30,14 +30,14 @@ WINDOWS_SCHEME = { + INSTALL_SCHEMES = { + 'unix_prefix': { + 'purelib': '$base/lib/python$py_version_short/site-packages', +- 'platlib': '$platbase/lib/python$py_version_short/site-packages', ++ 'platlib': '$platbase/lib64/python$py_version_short/site-packages', + 'headers': '$base/include/python$py_version_short$abiflags/$dist_name', + 'scripts': '$base/bin', + 'data' : '$base', + }, + 'unix_home': { + 'purelib': '$base/lib/python', +- 'platlib': '$base/lib/python', ++ 'platlib': '$base/lib64/python', + 'headers': '$base/include/python/$dist_name', + 'scripts': '$base/bin', + 'data' : '$base', +diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py +index 026cca7..6d3e077 100644 +--- a/Lib/distutils/sysconfig.py ++++ b/Lib/distutils/sysconfig.py +@@ -132,8 +132,12 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): + prefix = plat_specific and EXEC_PREFIX or PREFIX + + if os.name == "posix": ++ if plat_specific or standard_lib: ++ lib = "lib64" ++ else: ++ lib = "lib" + libpython = os.path.join(prefix, +- "lib", "python" + get_python_version()) ++ lib, "python" + get_python_version()) + if standard_lib: + return libpython + else: +diff --git a/Lib/site.py b/Lib/site.py +index a84e3bb..ba0d3ea 100644 +--- a/Lib/site.py ++++ b/Lib/site.py +@@ -303,11 +303,15 @@ def getsitepackages(prefixes=None): + seen.add(prefix) + 
+ if os.sep == '/': ++ sitepackages.append(os.path.join(prefix, "lib64", ++ "python" + sys.version[:3], ++ "site-packages")) + sitepackages.append(os.path.join(prefix, "lib", + "python%d.%d" % sys.version_info[:2], + "site-packages")) + else: + sitepackages.append(prefix) ++ sitepackages.append(os.path.join(prefix, "lib64", "site-packages")) + sitepackages.append(os.path.join(prefix, "lib", "site-packages")) + if sys.platform == "darwin": + # for framework builds *only* we add the standard Apple +diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py +index b9bbfe5..2a5f29c 100644 +--- a/Lib/sysconfig.py ++++ b/Lib/sysconfig.py +@@ -20,10 +20,10 @@ __all__ = [ + + _INSTALL_SCHEMES = { + 'posix_prefix': { +- 'stdlib': '{installed_base}/lib/python{py_version_short}', +- 'platstdlib': '{platbase}/lib/python{py_version_short}', ++ 'stdlib': '{installed_base}/lib64/python{py_version_short}', ++ 'platstdlib': '{platbase}/lib64/python{py_version_short}', + 'purelib': '{base}/lib/python{py_version_short}/site-packages', +- 'platlib': '{platbase}/lib/python{py_version_short}/site-packages', ++ 'platlib': '{platbase}/lib64/python{py_version_short}/site-packages', + 'include': + '{installed_base}/include/python{py_version_short}{abiflags}', + 'platinclude': +@@ -61,10 +61,10 @@ _INSTALL_SCHEMES = { + 'data': '{userbase}', + }, + 'posix_user': { +- 'stdlib': '{userbase}/lib/python{py_version_short}', +- 'platstdlib': '{userbase}/lib/python{py_version_short}', ++ 'stdlib': '{userbase}/lib64/python{py_version_short}', ++ 'platstdlib': '{userbase}/lib64/python{py_version_short}', + 'purelib': '{userbase}/lib/python{py_version_short}/site-packages', +- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages', ++ 'platlib': '{userbase}/lib64/python{py_version_short}/site-packages', + 'include': '{userbase}/include/python{py_version_short}', + 'scripts': '{userbase}/bin', + 'data': '{userbase}', +diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py +index f698927..bc977b5 100644 +--- a/Lib/test/test_site.py ++++ b/Lib/test/test_site.py +@@ -248,8 +248,8 @@ class HelperFunctionsTests(unittest.TestCase): + self.assertEqual(dirs[1], wanted) + elif os.sep == '/': + # OS X non-framework builds, Linux, FreeBSD, etc +- self.assertEqual(len(dirs), 1) +- wanted = os.path.join('xoxo', 'lib', ++ self.assertEqual(len(dirs), 2) ++ wanted = os.path.join('xoxo', 'lib64', + 'python%d.%d' % sys.version_info[:2], + 'site-packages') + self.assertEqual(dirs[0], wanted) +diff --git a/Makefile.pre.in b/Makefile.pre.in +index 8fa7934..a693917 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -126,7 +126,7 @@ LIBDIR= @libdir@ + MANDIR= @mandir@ + INCLUDEDIR= @includedir@ + CONFINCLUDEDIR= $(exec_prefix)/include +-SCRIPTDIR= $(prefix)/lib ++SCRIPTDIR= $(prefix)/lib64 + ABIFLAGS= @ABIFLAGS@ + + # Detailed destination directories +diff --git a/Modules/getpath.c b/Modules/getpath.c +index 65b47a3..eaa756c 100644 +--- a/Modules/getpath.c ++++ b/Modules/getpath.c +@@ -494,7 +494,7 @@ calculate_path(void) + _pythonpath = Py_DecodeLocale(PYTHONPATH, NULL); + _prefix = Py_DecodeLocale(PREFIX, NULL); + _exec_prefix = Py_DecodeLocale(EXEC_PREFIX, NULL); +- lib_python = Py_DecodeLocale("lib/python" VERSION, NULL); ++ lib_python = Py_DecodeLocale("lib64/python" VERSION, NULL); + + if (!_pythonpath || !_prefix || !_exec_prefix || !lib_python) { + Py_FatalError( +@@ -683,7 +683,7 @@ calculate_path(void) + } + else + wcsncpy(zip_path, _prefix, MAXPATHLEN); +- joinpath(zip_path, L"lib/python00.zip"); ++ joinpath(zip_path, 
L"lib64/python00.zip"); + bufsz = wcslen(zip_path); /* Replace "00" with version */ + zip_path[bufsz - 6] = VERSION[0]; + zip_path[bufsz - 5] = VERSION[2]; +@@ -695,7 +695,7 @@ calculate_path(void) + fprintf(stderr, + "Could not find platform dependent libraries \n"); + wcsncpy(exec_prefix, _exec_prefix, MAXPATHLEN); +- joinpath(exec_prefix, L"lib/lib-dynload"); ++ joinpath(exec_prefix, L"lib64/lib-dynload"); + } + /* If we found EXEC_PREFIX do *not* reduce it! (Yet.) */ + +diff --git a/setup.py b/setup.py +index 0f2dfc4..da37896 100644 +--- a/setup.py ++++ b/setup.py +@@ -492,7 +492,7 @@ class PyBuildExt(build_ext): + # directories (i.e. '.' and 'Include') must be first. See issue + # 10520. + if not cross_compiling: +- add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') ++ add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib64') + add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') + # only change this for cross builds for 3.3, issues on Mageia + if cross_compiling: +@@ -780,11 +780,11 @@ class PyBuildExt(build_ext): + elif curses_library: + readline_libs.append(curses_library) + elif self.compiler.find_library_file(lib_dirs + +- ['/usr/lib/termcap'], ++ ['/usr/lib64/termcap'], + 'termcap'): + readline_libs.append('termcap') + exts.append( Extension('readline', ['readline.c'], +- library_dirs=['/usr/lib/termcap'], ++ library_dirs=['/usr/lib64/termcap'], + extra_link_args=readline_extra_link_args, + libraries=readline_libs) ) + else: +@@ -821,8 +821,8 @@ class PyBuildExt(build_ext): + if krb5_h: + ssl_incs += krb5_h + ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, +- ['/usr/local/ssl/lib', +- '/usr/contrib/ssl/lib/' ++ ['/usr/local/ssl/lib64', ++ '/usr/contrib/ssl/lib64/' + ] ) + + if (ssl_incs is not None and diff --git a/SOURCES/00104-lib64-fix-for-test_install.patch b/SOURCES/00104-lib64-fix-for-test_install.patch new file mode 100644 index 00000000..7852bf69 --- /dev/null +++ b/SOURCES/00104-lib64-fix-for-test_install.patch @@ -0,0 +1,13 @@ +--- Python-2.7.2/Lib/distutils/tests/test_install.py.lib64 2011-09-08 17:51:57.851405376 -0400 ++++ Python-2.7.2/Lib/distutils/tests/test_install.py 2011-09-08 18:40:46.754205096 -0400 +@@ -41,8 +41,9 @@ class InstallTestCase(support.TempdirMan + self.assertEqual(got, expected) + + libdir = os.path.join(destination, "lib", "python") ++ platlibdir = os.path.join(destination, "lib64", "python") + check_path(cmd.install_lib, libdir) +- check_path(cmd.install_platlib, libdir) ++ check_path(cmd.install_platlib, platlibdir) + check_path(cmd.install_purelib, libdir) + check_path(cmd.install_headers, + os.path.join(destination, "include", "python", "foopkg")) diff --git a/SOURCES/00111-no-static-lib.patch b/SOURCES/00111-no-static-lib.patch new file mode 100644 index 00000000..bc4203de --- /dev/null +++ b/SOURCES/00111-no-static-lib.patch @@ -0,0 +1,60 @@ +diff --git a/Makefile.pre.in b/Makefile.pre.in +index 4b093e3..1088435 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -543,7 +543,7 @@ clinic: check-clean-src $(srcdir)/Modules/_blake2/blake2s_impl.c + $(PYTHON_FOR_REGEN) ./Tools/clinic/clinic.py --make + + # Build the interpreter +-$(BUILDPYTHON): Programs/python.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY) ++$(BUILDPYTHON): Programs/python.o $(LDLIBRARY) $(PY3LIBRARY) + $(LINKCC) $(PY_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/python.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST) + + platform: $(BUILDPYTHON) pybuilddir.txt +@@ -588,18 +588,6 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt 
Modules/_math.o + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build + + +-# Build static library +-# avoid long command lines, same as LIBRARY_OBJS +-$(LIBRARY): $(LIBRARY_OBJS) +- -rm -f $@ +- $(AR) $(ARFLAGS) $@ Modules/getbuildinfo.o +- $(AR) $(ARFLAGS) $@ $(PARSER_OBJS) +- $(AR) $(ARFLAGS) $@ $(OBJECT_OBJS) +- $(AR) $(ARFLAGS) $@ $(PYTHON_OBJS) Python/frozen.o +- $(AR) $(ARFLAGS) $@ $(MODULE_OBJS) +- $(AR) $(ARFLAGS) $@ $(MODOBJS) +- $(RANLIB) $@ +- + libpython$(LDVERSION).so: $(LIBRARY_OBJS) + if test $(INSTSONAME) != $(LDLIBRARY); then \ + $(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \ +@@ -689,7 +677,7 @@ Modules/Setup: $(srcdir)/Modules/Setup.dist + echo "-----------------------------------------------"; \ + fi + +-Programs/_testembed: Programs/_testembed.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY) ++Programs/_testembed: Programs/_testembed.o $(LDLIBRARY) $(PY3LIBRARY) + $(LINKCC) $(PY_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/_testembed.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST) + + ############################################################################ +@@ -1425,18 +1413,6 @@ libainstall: @DEF_MAKE_RULE@ python-config + else true; \ + fi; \ + done +- @if test -d $(LIBRARY); then :; else \ +- if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \ +- if test "$(SHLIB_SUFFIX)" = .dll; then \ +- $(INSTALL_DATA) $(LDLIBRARY) $(DESTDIR)$(LIBPL) ; \ +- else \ +- $(INSTALL_DATA) $(LIBRARY) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \ +- $(RANLIB) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \ +- fi; \ +- else \ +- echo Skip install of $(LIBRARY) - use make frameworkinstall; \ +- fi; \ +- fi + $(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c + $(INSTALL_DATA) Programs/python.o $(DESTDIR)$(LIBPL)/python.o + $(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in diff --git a/SOURCES/00132-add-rpmbuild-hooks-to-unittest.patch b/SOURCES/00132-add-rpmbuild-hooks-to-unittest.patch new file mode 100644 index 00000000..77dc6ecb --- /dev/null +++ b/SOURCES/00132-add-rpmbuild-hooks-to-unittest.patch @@ -0,0 +1,46 @@ +diff -up Python-3.2.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest Python-3.2.2/Lib/unittest/case.py +--- Python-3.2.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest 2011-09-03 12:16:44.000000000 -0400 ++++ Python-3.2.2/Lib/unittest/case.py 2011-09-09 06:35:16.365568382 -0400 +@@ -3,6 +3,7 @@ + import sys + import functools + import difflib ++import os + import logging + import pprint + import re +@@ -101,5 +102,21 @@ def expectedFailure(func): + raise self.test_case.failureException(msg) + ++# Non-standard/downstream-only hooks for handling issues with specific test ++# cases: ++ ++def _skipInRpmBuild(reason): ++ """ ++ Non-standard/downstream-only decorator for marking a specific unit test ++ to be skipped when run within the %check of an rpmbuild. ++ ++ Specifically, this takes effect when WITHIN_PYTHON_RPM_BUILD is set within ++ the environment, and has no effect otherwise. 
++ """ ++ if 'WITHIN_PYTHON_RPM_BUILD' in os.environ: ++ return skip(reason) ++ else: ++ return _id ++ + class _AssertRaisesBaseContext(_BaseTestCaseContext): + + def __init__(self, expected, test_case, expected_regex=None): +diff -up Python-3.2.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest Python-3.2.2/Lib/unittest/__init__.py +--- Python-3.2.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest 2011-09-03 12:16:44.000000000 -0400 ++++ Python-3.2.2/Lib/unittest/__init__.py 2011-09-09 06:35:16.366568382 -0400 +@@ -57,7 +57,8 @@ __unittest = True + + from .result import TestResult + from .case import (TestCase, FunctionTestCase, SkipTest, skip, skipIf, +- skipUnless, expectedFailure) ++ skipUnless, expectedFailure, ++ _skipInRpmBuild) + from .suite import BaseTestSuite, TestSuite + from .loader import (TestLoader, defaultTestLoader, makeSuite, getTestCaseNames, + findTestCases) diff --git a/SOURCES/00137-skip-distutils-tests-that-fail-in-rpmbuild.patch b/SOURCES/00137-skip-distutils-tests-that-fail-in-rpmbuild.patch new file mode 100644 index 00000000..04570930 --- /dev/null +++ b/SOURCES/00137-skip-distutils-tests-that-fail-in-rpmbuild.patch @@ -0,0 +1,12 @@ +diff -up Python-3.2.2/Lib/distutils/tests/test_bdist_rpm.py.skip-distutils-tests-that-fail-in-rpmbuild Python-3.2.2/Lib/distutils/tests/test_bdist_rpm.py +--- Python-3.2.2/Lib/distutils/tests/test_bdist_rpm.py.skip-distutils-tests-that-fail-in-rpmbuild 2011-09-03 12:16:40.000000000 -0400 ++++ Python-3.2.2/Lib/distutils/tests/test_bdist_rpm.py 2011-09-10 05:04:56.328852558 -0400 +@@ -23,6 +23,7 @@ setup(name='foo', version='0.1', py_modu + + """ + ++@unittest._skipInRpmBuild("don't try to nest one rpm build inside another rpm build") + class BuildRpmTestCase(support.TempdirManager, + support.EnvironGuard, + support.LoggingSilencer, +diff -up Python-3.2.2/Lib/distutils/tests/test_build_ext.py.skip-distutils-tests-that-fail-in-rpmbuild Python-3.2.2/Lib/distutils/tests/test_build_ext.py diff --git a/SOURCES/00146-hashlib-fips.patch b/SOURCES/00146-hashlib-fips.patch new file mode 100644 index 00000000..e0cdce0a --- /dev/null +++ b/SOURCES/00146-hashlib-fips.patch @@ -0,0 +1,640 @@ +diff --git a/Lib/hashlib.py b/Lib/hashlib.py +index 316cece..b7ad879 100644 +--- a/Lib/hashlib.py ++++ b/Lib/hashlib.py +@@ -23,6 +23,16 @@ the zlib module. + Choose your hash function wisely. Some have known collision weaknesses. + sha384 and sha512 will be slow on 32 bit platforms. + ++If the underlying implementation supports "FIPS mode", and this is enabled, it ++may restrict the available hashes to only those that are compliant with FIPS ++regulations. For example, it may deny the use of MD5, on the grounds that this ++is not secure for uses such as authentication, system integrity checking, or ++digital signatures. If you need to use such a hash for non-security purposes ++(such as indexing into a data structure for speed), you can override the keyword ++argument "usedforsecurity" from True to False to signify that your code is not ++relying on the hash for security purposes, and this will allow the hash to be ++usable even in FIPS mode. ++ + Hash objects have these methods: + - update(arg): Update the hash object with the bytes in arg. 
Repeated calls + are equivalent to a single call with the concatenation of all +@@ -62,6 +72,18 @@ algorithms_available = set(__always_supported) + __all__ = __always_supported + ('new', 'algorithms_guaranteed', + 'algorithms_available', 'pbkdf2_hmac') + ++import functools ++def __ignore_usedforsecurity(func): ++ """Used for sha3_* functions. Until OpenSSL implements them, we want ++ to use them from Python _sha3 module, but we want them to accept ++ usedforsecurity argument too.""" ++ # TODO: remove this function when OpenSSL implements sha3 ++ @functools.wraps(func) ++ def inner(*args, **kwargs): ++ if 'usedforsecurity' in kwargs: ++ kwargs.pop('usedforsecurity') ++ return func(*args, **kwargs) ++ return inner + + __builtin_constructor_cache = {} + +@@ -100,31 +122,39 @@ def __get_openssl_constructor(name): + f = getattr(_hashlib, 'openssl_' + name) + # Allow the C module to raise ValueError. The function will be + # defined but the hash not actually available thanks to OpenSSL. +- f() ++ # We pass "usedforsecurity=False" to disable FIPS-based restrictions: ++ # at this stage we're merely seeing if the function is callable, ++ # rather than using it for actual work. ++ f(usedforsecurity=False) + # Use the C function directly (very fast) + return f + except (AttributeError, ValueError): ++ # TODO: We want to just raise here when OpenSSL implements sha3 ++ # because we want to make sure that Fedora uses everything from OpenSSL + return __get_builtin_constructor(name) + + +-def __py_new(name, data=b''): +- """new(name, data=b'') - Return a new hashing object using the named algorithm; +- optionally initialized with data (which must be bytes). ++def __py_new(name, data=b'', usedforsecurity=True): ++ """new(name, data=b'', usedforsecurity=True) - Return a new hashing object using ++ the named algorithm; optionally initialized with data (which must be bytes). ++ The 'usedforsecurity' keyword argument does nothing, and is for compatibilty ++ with the OpenSSL implementation + """ + return __get_builtin_constructor(name)(data) + + +-def __hash_new(name, data=b''): +- """new(name, data=b'') - Return a new hashing object using the named algorithm; +- optionally initialized with data (which must be bytes). ++def __hash_new(name, data=b'', usedforsecurity=True): ++ """new(name, data=b'', usedforsecurity=True) - Return a new hashing object using ++ the named algorithm; optionally initialized with data (which must be bytes). ++ ++ Override 'usedforsecurity' to False when using for non-security purposes in ++ a FIPS environment + """ + try: +- return _hashlib.new(name, data) ++ return _hashlib.new(name, data, usedforsecurity) + except ValueError: +- # If the _hashlib module (OpenSSL) doesn't support the named +- # hash, try using our builtin implementations. +- # This allows for SHA224/256 and SHA384/512 support even though +- # the OpenSSL library prior to 0.9.8 doesn't provide them. ++ # TODO: We want to just raise here when OpenSSL implements sha3 ++ # because we want to make sure that Fedora uses everything from OpenSSL + return __get_builtin_constructor(name)(data) + + +@@ -207,7 +237,10 @@ for __func_name in __always_supported: + # try them all, some may not work due to the OpenSSL + # version not supporting that algorithm. 
+ try: +- globals()[__func_name] = __get_hash(__func_name) ++ func = __get_hash(__func_name) ++ if 'sha3_' in __func_name: ++ func = __ignore_usedforsecurity(func) ++ globals()[__func_name] = func + except ValueError: + import logging + logging.exception('code for hash %s was not found.', __func_name) +@@ -215,3 +248,4 @@ for __func_name in __always_supported: + # Cleanup locals() + del __always_supported, __func_name, __get_hash + del __py_new, __hash_new, __get_openssl_constructor ++del __ignore_usedforsecurity +\ No newline at end of file +diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py +index c9b113e..60e2392 100644 +--- a/Lib/test/test_hashlib.py ++++ b/Lib/test/test_hashlib.py +@@ -24,7 +24,22 @@ from test.support import _4G, bigmemtest, import_fresh_module + COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount') + + c_hashlib = import_fresh_module('hashlib', fresh=['_hashlib']) +-py_hashlib = import_fresh_module('hashlib', blocked=['_hashlib']) ++# skipped on Fedora, since we always use OpenSSL implementation ++# py_hashlib = import_fresh_module('hashlib', blocked=['_hashlib']) ++ ++def openssl_enforces_fips(): ++ # Use the "openssl" command (if present) to try to determine if the local ++ # OpenSSL is configured to enforce FIPS ++ from subprocess import Popen, PIPE ++ try: ++ p = Popen(['openssl', 'md5'], ++ stdin=PIPE, stdout=PIPE, stderr=PIPE) ++ except OSError: ++ # "openssl" command not found ++ return False ++ stdout, stderr = p.communicate(input=b'abc') ++ return b'unknown cipher' in stderr ++OPENSSL_ENFORCES_FIPS = openssl_enforces_fips() + + def hexstr(s): + assert isinstance(s, bytes), repr(s) +@@ -34,6 +49,16 @@ def hexstr(s): + r += h[(i >> 4) & 0xF] + h[i & 0xF] + return r + ++# hashlib and _hashlib-based functions support a "usedforsecurity" keyword ++# argument, and FIPS mode requires that it be used overridden with a False ++# value for these selftests to work. Other cryptographic code within Python ++# doesn't support this keyword. ++# Modify a function to one in which "usedforsecurity=False" is added to the ++# keyword arguments: ++def suppress_fips(f): ++ def g(*args, **kwargs): ++ return f(*args, usedforsecurity=False, **kwargs) ++ return g + + class HashLibTestCase(unittest.TestCase): + supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1', +@@ -63,11 +88,11 @@ class HashLibTestCase(unittest.TestCase): + # For each algorithm, test the direct constructor and the use + # of hashlib.new given the algorithm name. 
+ for algorithm, constructors in self.constructors_to_test.items(): +- constructors.add(getattr(hashlib, algorithm)) ++ constructors.add(suppress_fips(getattr(hashlib, algorithm))) + def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm): + if data is None: +- return hashlib.new(_alg) +- return hashlib.new(_alg, data) ++ return suppress_fips(hashlib.new)(_alg) ++ return suppress_fips(hashlib.new)(_alg, data) + constructors.add(_test_algorithm_via_hashlib_new) + + _hashlib = self._conditional_import_module('_hashlib') +@@ -79,27 +104,12 @@ class HashLibTestCase(unittest.TestCase): + for algorithm, constructors in self.constructors_to_test.items(): + constructor = getattr(_hashlib, 'openssl_'+algorithm, None) + if constructor: +- constructors.add(constructor) ++ constructors.add(suppress_fips(constructor)) + + def add_builtin_constructor(name): + constructor = getattr(hashlib, "__get_builtin_constructor")(name) + self.constructors_to_test[name].add(constructor) + +- _md5 = self._conditional_import_module('_md5') +- if _md5: +- add_builtin_constructor('md5') +- _sha1 = self._conditional_import_module('_sha1') +- if _sha1: +- add_builtin_constructor('sha1') +- _sha256 = self._conditional_import_module('_sha256') +- if _sha256: +- add_builtin_constructor('sha224') +- add_builtin_constructor('sha256') +- _sha512 = self._conditional_import_module('_sha512') +- if _sha512: +- add_builtin_constructor('sha384') +- add_builtin_constructor('sha512') +- + super(HashLibTestCase, self).__init__(*args, **kwargs) + + @property +@@ -148,9 +158,6 @@ class HashLibTestCase(unittest.TestCase): + else: + del sys.modules['_md5'] + self.assertRaises(TypeError, get_builtin_constructor, 3) +- constructor = get_builtin_constructor('md5') +- self.assertIs(constructor, _md5.md5) +- self.assertEqual(sorted(builtin_constructor_cache), ['MD5', 'md5']) + + def test_hexdigest(self): + for cons in self.hash_constructors: +@@ -433,6 +440,64 @@ class HashLibTestCase(unittest.TestCase): + + self.assertEqual(expected_hash, hasher.hexdigest()) + ++ def test_issue9146(self): ++ # Ensure that various ways to use "MD5" from "hashlib" don't segfault: ++ m = hashlib.md5(usedforsecurity=False) ++ m.update(b'abc\n') ++ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1") ++ ++ m = hashlib.new('md5', usedforsecurity=False) ++ m.update(b'abc\n') ++ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1") ++ ++ m = hashlib.md5(b'abc\n', usedforsecurity=False) ++ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1") ++ ++ m = hashlib.new('md5', b'abc\n', usedforsecurity=False) ++ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1") ++ ++ @unittest.skipUnless(OPENSSL_ENFORCES_FIPS, ++ 'FIPS enforcement required for this test.') ++ def test_hashlib_fips_mode(self): ++ # Ensure that we raise a ValueError on vanilla attempts to use MD5 ++ # in hashlib in a FIPS-enforced setting: ++ with self.assertRaisesRegexp(ValueError, '.*unknown cipher'): ++ m = hashlib.md5() ++ ++ if not self._conditional_import_module('_md5'): ++ with self.assertRaisesRegexp(ValueError, '.*unknown cipher'): ++ m = hashlib.new('md5') ++ ++ @unittest.skipUnless(OPENSSL_ENFORCES_FIPS, ++ 'FIPS enforcement required for this test.') ++ def test_hashopenssl_fips_mode(self): ++ # Verify the _hashlib module's handling of md5: ++ _hashlib = self._conditional_import_module('_hashlib') ++ if _hashlib: ++ assert hasattr(_hashlib, 'openssl_md5') ++ ++ # Ensure that _hashlib raises a ValueError on vanilla attempts to ++ 
# use MD5 in a FIPS-enforced setting: ++ with self.assertRaisesRegexp(ValueError, '.*unknown cipher'): ++ m = _hashlib.openssl_md5() ++ with self.assertRaisesRegexp(ValueError, '.*unknown cipher'): ++ m = _hashlib.new('md5') ++ ++ # Ensure that in such a setting we can whitelist a callsite with ++ # usedforsecurity=False and have it succeed: ++ m = _hashlib.openssl_md5(usedforsecurity=False) ++ m.update(b'abc\n') ++ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1") ++ ++ m = _hashlib.new('md5', usedforsecurity=False) ++ m.update(b'abc\n') ++ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1") ++ ++ m = _hashlib.openssl_md5(b'abc\n', usedforsecurity=False) ++ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1") ++ ++ m = _hashlib.new('md5', b'abc\n', usedforsecurity=False) ++ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1") + + class KDFTests(unittest.TestCase): + +@@ -516,7 +581,7 @@ class KDFTests(unittest.TestCase): + out = pbkdf2(hash_name='sha1', password=b'password', salt=b'salt', + iterations=1, dklen=None) + self.assertEqual(out, self.pbkdf2_results['sha1'][0][0]) +- ++ @unittest.skip('skipped on Fedora, as we always use OpenSSL pbkdf2_hmac') + def test_pbkdf2_hmac_py(self): + self._test_pbkdf2_hmac(py_hashlib.pbkdf2_hmac) + +diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c +index 44765ac..b8cf490 100644 +--- a/Modules/_hashopenssl.c ++++ b/Modules/_hashopenssl.c +@@ -20,6 +20,8 @@ + + + /* EVP is the preferred interface to hashing in OpenSSL */ ++#include ++#include + #include + #include + /* We use the object interface to discover what hashes OpenSSL supports. */ +@@ -45,11 +47,19 @@ typedef struct { + + static PyTypeObject EVPtype; + ++/* Struct to hold all the cached information we need on a specific algorithm. ++ We have one of these per algorithm */ ++typedef struct { ++ PyObject *name_obj; ++ EVP_MD_CTX ctxs[2]; ++ /* ctx_ptrs will point to ctxs unless an error occurred, when it will ++ be NULL: */ ++ EVP_MD_CTX *ctx_ptrs[2]; ++ PyObject *error_msgs[2]; ++} EVPCachedInfo; + +-#define DEFINE_CONSTS_FOR_NEW(Name) \ +- static PyObject *CONST_ ## Name ## _name_obj = NULL; \ +- static EVP_MD_CTX CONST_new_ ## Name ## _ctx; \ +- static EVP_MD_CTX *CONST_new_ ## Name ## _ctx_p = NULL; ++#define DEFINE_CONSTS_FOR_NEW(Name) \ ++ static EVPCachedInfo cached_info_ ##Name; + + DEFINE_CONSTS_FOR_NEW(md5) + DEFINE_CONSTS_FOR_NEW(sha1) +@@ -92,6 +102,48 @@ EVP_hash(EVPobject *self, const void *vp, Py_ssize_t len) + } + } + ++static void ++mc_ctx_init(EVP_MD_CTX *ctx, int usedforsecurity) ++{ ++ EVP_MD_CTX_init(ctx); ++ ++ /* ++ If the user has declared that this digest is being used in a ++ non-security role (e.g. 
indexing into a data structure), set ++ the exception flag for openssl to allow it ++ */ ++ if (!usedforsecurity) { ++#ifdef EVP_MD_CTX_FLAG_NON_FIPS_ALLOW ++ EVP_MD_CTX_set_flags(ctx, ++ EVP_MD_CTX_FLAG_NON_FIPS_ALLOW); ++#endif ++ } ++} ++ ++/* Get an error msg for the last error as a PyObject */ ++static PyObject * ++error_msg_for_last_error(void) ++{ ++ char *errstr; ++ ++ errstr = ERR_error_string(ERR_peek_last_error(), NULL); ++ ERR_clear_error(); ++ ++ return PyUnicode_FromString(errstr); /* Can be NULL */ ++} ++ ++static void ++set_evp_exception(void) ++{ ++ char *errstr; ++ ++ errstr = ERR_error_string(ERR_peek_last_error(), NULL); ++ ERR_clear_error(); ++ ++ PyErr_SetString(PyExc_ValueError, errstr); ++} ++ ++ + /* Internal methods for a hash object */ + + static void +@@ -259,15 +311,16 @@ EVP_repr(EVPobject *self) + static int + EVP_tp_init(EVPobject *self, PyObject *args, PyObject *kwds) + { +- static char *kwlist[] = {"name", "string", NULL}; ++ static char *kwlist[] = {"name", "string", "usedforsecurity", NULL}; + PyObject *name_obj = NULL; + PyObject *data_obj = NULL; ++ int usedforsecurity = 1; + Py_buffer view; + char *nameStr; + const EVP_MD *digest; + +- if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:HASH", kwlist, +- &name_obj, &data_obj)) { ++ if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|Oi:HASH", kwlist, ++ &name_obj, &data_obj, &usedforsecurity)) { + return -1; + } + +@@ -288,7 +341,12 @@ EVP_tp_init(EVPobject *self, PyObject *args, PyObject *kwds) + PyBuffer_Release(&view); + return -1; + } +- EVP_DigestInit(&self->ctx, digest); ++ mc_ctx_init(&self->ctx, usedforsecurity); ++ if (!EVP_DigestInit_ex(&self->ctx, digest, NULL)) { ++ set_evp_exception(); ++ PyBuffer_Release(&view); ++ return -1; ++ } + + self->name = name_obj; + Py_INCREF(self->name); +@@ -372,7 +430,8 @@ static PyTypeObject EVPtype = { + static PyObject * + EVPnew(PyObject *name_obj, + const EVP_MD *digest, const EVP_MD_CTX *initial_ctx, +- const unsigned char *cp, Py_ssize_t len) ++ const unsigned char *cp, Py_ssize_t len, ++ int usedforsecurity) + { + EVPobject *self; + +@@ -387,7 +446,12 @@ EVPnew(PyObject *name_obj, + if (initial_ctx) { + EVP_MD_CTX_copy(&self->ctx, initial_ctx); + } else { +- EVP_DigestInit(&self->ctx, digest); ++ mc_ctx_init(&self->ctx, usedforsecurity); ++ if (!EVP_DigestInit_ex(&self->ctx, digest, NULL)) { ++ set_evp_exception(); ++ Py_DECREF(self); ++ return NULL; ++ } + } + + if (cp && len) { +@@ -411,21 +475,29 @@ PyDoc_STRVAR(EVP_new__doc__, + An optional string argument may be provided and will be\n\ + automatically hashed.\n\ + \n\ +-The MD5 and SHA1 algorithms are always supported.\n"); ++The MD5 and SHA1 algorithms are always supported.\n\ ++\n\ ++An optional \"usedforsecurity=True\" keyword argument is provided for use in\n\ ++environments that enforce FIPS-based restrictions. Some implementations of\n\ ++OpenSSL can be configured to prevent the usage of non-secure algorithms (such\n\ ++as MD5). If you have a non-security use for these algorithms (e.g. 
a hash\n\ ++table), you can override this argument by marking the callsite as\n\ ++\"usedforsecurity=False\"."); + + static PyObject * + EVP_new(PyObject *self, PyObject *args, PyObject *kwdict) + { +- static char *kwlist[] = {"name", "string", NULL}; ++ static char *kwlist[] = {"name", "string", "usedforsecurity", NULL}; + PyObject *name_obj = NULL; + PyObject *data_obj = NULL; ++ int usedforsecurity = 1; + Py_buffer view = { 0 }; + PyObject *ret_obj; + char *name; + const EVP_MD *digest; + +- if (!PyArg_ParseTupleAndKeywords(args, kwdict, "O|O:new", kwlist, +- &name_obj, &data_obj)) { ++ if (!PyArg_ParseTupleAndKeywords(args, kwdict, "O|Oi:new", kwlist, ++ &name_obj, &data_obj, &usedforsecurity)) { + return NULL; + } + +@@ -439,7 +511,8 @@ EVP_new(PyObject *self, PyObject *args, PyObject *kwdict) + + digest = EVP_get_digestbyname(name); + +- ret_obj = EVPnew(name_obj, digest, NULL, (unsigned char*)view.buf, view.len); ++ ret_obj = EVPnew(name_obj, digest, NULL, (unsigned char*)view.buf, view.len, ++ usedforsecurity); + + if (data_obj) + PyBuffer_Release(&view); +@@ -722,57 +795,114 @@ generate_hash_name_list(void) + + + /* +- * This macro generates constructor function definitions for specific +- * hash algorithms. These constructors are much faster than calling +- * the generic one passing it a python string and are noticably +- * faster than calling a python new() wrapper. Thats important for ++ * This macro and function generates a family of constructor function ++ * definitions for specific hash algorithms. These constructors are much ++ * faster than calling the generic one passing it a python string and are ++ * noticably faster than calling a python new() wrapper. That's important for + * code that wants to make hashes of a bunch of small strings. + */ + #define GEN_CONSTRUCTOR(NAME) \ + static PyObject * \ +- EVP_new_ ## NAME (PyObject *self, PyObject *args) \ ++ EVP_new_ ## NAME (PyObject *self, PyObject *args, PyObject *kwdict) \ + { \ +- PyObject *data_obj = NULL; \ +- Py_buffer view = { 0 }; \ +- PyObject *ret_obj; \ +- \ +- if (!PyArg_ParseTuple(args, "|O:" #NAME , &data_obj)) { \ +- return NULL; \ +- } \ +- \ +- if (data_obj) \ +- GET_BUFFER_VIEW_OR_ERROUT(data_obj, &view); \ +- \ +- ret_obj = EVPnew( \ +- CONST_ ## NAME ## _name_obj, \ +- NULL, \ +- CONST_new_ ## NAME ## _ctx_p, \ +- (unsigned char*)view.buf, \ +- view.len); \ +- \ +- if (data_obj) \ +- PyBuffer_Release(&view); \ +- return ret_obj; \ ++ return implement_specific_EVP_new(self, args, kwdict, \ ++ "|Oi:" #NAME, \ ++ &cached_info_ ## NAME ); \ + } + ++static PyObject * ++implement_specific_EVP_new(PyObject *self, PyObject *args, PyObject *kwdict, ++ const char *format, ++ EVPCachedInfo *cached_info) ++{ ++ static char *kwlist[] = {"string", "usedforsecurity", NULL}; ++ PyObject *data_obj = NULL; ++ Py_buffer view = { 0 }; ++ int usedforsecurity = 1; ++ int idx; ++ PyObject *ret_obj = NULL; ++ ++ assert(cached_info); ++ ++ if (!PyArg_ParseTupleAndKeywords(args, kwdict, format, kwlist, ++ &data_obj, &usedforsecurity)) { ++ return NULL; ++ } ++ ++ if (data_obj) ++ GET_BUFFER_VIEW_OR_ERROUT(data_obj, &view); ++ ++ idx = usedforsecurity ? 
1 : 0; ++ ++ /* ++ * If an error occurred during creation of the global content, the ctx_ptr ++ * will be NULL, and the error_msg will hopefully be non-NULL: ++ */ ++ if (cached_info->ctx_ptrs[idx]) { ++ /* We successfully initialized this context; copy it: */ ++ ret_obj = EVPnew(cached_info->name_obj, ++ NULL, ++ cached_info->ctx_ptrs[idx], ++ (unsigned char*)view.buf, view.len, ++ usedforsecurity); ++ } else { ++ /* Some kind of error happened initializing the global context for ++ this (digest, usedforsecurity) pair. ++ Raise an exception with the saved error message: */ ++ if (cached_info->error_msgs[idx]) { ++ PyErr_SetObject(PyExc_ValueError, cached_info->error_msgs[idx]); ++ } else { ++ PyErr_SetString(PyExc_ValueError, "Error initializing hash"); ++ } ++ } ++ ++ if (data_obj) ++ PyBuffer_Release(&view); ++ ++ return ret_obj; ++} ++ + /* a PyMethodDef structure for the constructor */ + #define CONSTRUCTOR_METH_DEF(NAME) \ +- {"openssl_" #NAME, (PyCFunction)EVP_new_ ## NAME, METH_VARARGS, \ ++ {"openssl_" #NAME, (PyCFunction)EVP_new_ ## NAME, \ ++ METH_VARARGS|METH_KEYWORDS, \ + PyDoc_STR("Returns a " #NAME \ + " hash object; optionally initialized with a string") \ + } + +-/* used in the init function to setup a constructor: initialize OpenSSL +- constructor constants if they haven't been initialized already. */ +-#define INIT_CONSTRUCTOR_CONSTANTS(NAME) do { \ +- if (CONST_ ## NAME ## _name_obj == NULL) { \ +- CONST_ ## NAME ## _name_obj = PyUnicode_FromString(#NAME); \ +- if (EVP_get_digestbyname(#NAME)) { \ +- CONST_new_ ## NAME ## _ctx_p = &CONST_new_ ## NAME ## _ctx; \ +- EVP_DigestInit(CONST_new_ ## NAME ## _ctx_p, EVP_get_digestbyname(#NAME)); \ +- } \ +- } \ ++/* ++ Macro/function pair to set up the constructors. ++ ++ Try to initialize a context for each hash twice, once with ++ EVP_MD_CTX_FLAG_NON_FIPS_ALLOW and once without. ++ ++ Any that have errors during initialization will end up with a NULL ctx_ptrs ++ entry, and err_msgs will be set (unless we're very low on memory) ++*/ ++#define INIT_CONSTRUCTOR_CONSTANTS(NAME) do { \ ++ init_constructor_constant(&cached_info_ ## NAME, #NAME); \ + } while (0); ++static void ++init_constructor_constant(EVPCachedInfo *cached_info, const char *name) ++{ ++ assert(cached_info); ++ cached_info->name_obj = PyUnicode_FromString(name); ++ if (EVP_get_digestbyname(name)) { ++ int i; ++ for (i=0; i<2; i++) { ++ mc_ctx_init(&cached_info->ctxs[i], i); ++ if (EVP_DigestInit_ex(&cached_info->ctxs[i], ++ EVP_get_digestbyname(name), NULL)) { ++ /* Success: */ ++ cached_info->ctx_ptrs[i] = &cached_info->ctxs[i]; ++ } else { ++ /* Failure: */ ++ cached_info->ctx_ptrs[i] = NULL; ++ cached_info->error_msgs[i] = error_msg_for_last_error(); ++ } ++ } ++ } ++} + + GEN_CONSTRUCTOR(md5) + GEN_CONSTRUCTOR(sha1) +@@ -819,13 +949,10 @@ PyInit__hashlib(void) + { + PyObject *m, *openssl_md_meth_names; + +- OpenSSL_add_all_digests(); +- ERR_load_crypto_strings(); ++ SSL_load_error_strings(); ++ SSL_library_init(); + +- /* TODO build EVP_functions openssl_* entries dynamically based +- * on what hashes are supported rather than listing many +- * but having some be unsupported. Only init appropriate +- * constants. 
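
To illustrate the keyword documented in the docstring added by this patch (the snippet below is an editorial sketch, not part of the patch): on an interpreter built with this downstream change, a FIPS-enforced OpenSSL rejects MD5 by default, and the new usedforsecurity=False argument whitelists a non-security callsite. On an unpatched CPython of this era the extra keyword is simply rejected with a TypeError.

    import _hashlib

    try:
        h = _hashlib.new('md5')          # ValueError under FIPS enforcement
    except ValueError:
        # Explicitly mark this as a non-security use (e.g. a cache key):
        h = _hashlib.new('md5', usedforsecurity=False)
    h.update(b'abc\n')
    print(h.hexdigest())                 # 0bee89b07a248e27c83fc3d5951213c1
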
*/ ++ OpenSSL_add_all_digests(); + + Py_TYPE(&EVPtype) = &PyType_Type; + if (PyType_Ready(&EVPtype) < 0) diff --git a/SOURCES/00155-avoid-ctypes-thunks.patch b/SOURCES/00155-avoid-ctypes-thunks.patch new file mode 100644 index 00000000..f03890ee --- /dev/null +++ b/SOURCES/00155-avoid-ctypes-thunks.patch @@ -0,0 +1,15 @@ +diff -up Python-3.2.3/Lib/ctypes/__init__.py.rhbz814391 Python-3.2.3/Lib/ctypes/__init__.py +--- Python-3.2.3/Lib/ctypes/__init__.py.rhbz814391 2012-04-20 15:12:49.017867692 -0400 ++++ Python-3.2.3/Lib/ctypes/__init__.py 2012-04-20 15:15:09.501111408 -0400 +@@ -275,11 +275,6 @@ def _reset_cache(): + # _SimpleCData.c_char_p_from_param + POINTER(c_char).from_param = c_char_p.from_param + _pointer_type_cache[None] = c_void_p +- # XXX for whatever reasons, creating the first instance of a callback +- # function is needed for the unittests on Win64 to succeed. This MAY +- # be a compiler bug, since the problem occurs only when _ctypes is +- # compiled with the MS SDK compiler. Or an uninitialized variable? +- CFUNCTYPE(c_int)(lambda: None) + + def create_unicode_buffer(init, size=None): + """create_unicode_buffer(aString) -> character array diff --git a/SOURCES/00157-uid-gid-overflows.patch b/SOURCES/00157-uid-gid-overflows.patch new file mode 100644 index 00000000..03f3e021 --- /dev/null +++ b/SOURCES/00157-uid-gid-overflows.patch @@ -0,0 +1,68 @@ +diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py +index e9fdb07..ea60e6e 100644 +--- a/Lib/test/test_os.py ++++ b/Lib/test/test_os.py +@@ -1723,30 +1723,36 @@ class PosixUidGidTests(unittest.TestCase): + def test_setuid(self): + if os.getuid() != 0: + self.assertRaises(OSError, os.setuid, 0) ++ self.assertRaises(TypeError, os.setuid, 'not an int') + self.assertRaises(OverflowError, os.setuid, 1<<32) + + @unittest.skipUnless(hasattr(os, 'setgid'), 'test needs os.setgid()') + def test_setgid(self): + if os.getuid() != 0 and not HAVE_WHEEL_GROUP: + self.assertRaises(OSError, os.setgid, 0) ++ self.assertRaises(TypeError, os.setgid, 'not an int') + self.assertRaises(OverflowError, os.setgid, 1<<32) + + @unittest.skipUnless(hasattr(os, 'seteuid'), 'test needs os.seteuid()') + def test_seteuid(self): + if os.getuid() != 0: + self.assertRaises(OSError, os.seteuid, 0) ++ self.assertRaises(TypeError, os.seteuid, 'not an int') + self.assertRaises(OverflowError, os.seteuid, 1<<32) + + @unittest.skipUnless(hasattr(os, 'setegid'), 'test needs os.setegid()') + def test_setegid(self): + if os.getuid() != 0 and not HAVE_WHEEL_GROUP: + self.assertRaises(OSError, os.setegid, 0) ++ self.assertRaises(TypeError, os.setegid, 'not an int') + self.assertRaises(OverflowError, os.setegid, 1<<32) + + @unittest.skipUnless(hasattr(os, 'setreuid'), 'test needs os.setreuid()') + def test_setreuid(self): + if os.getuid() != 0: + self.assertRaises(OSError, os.setreuid, 0, 0) ++ self.assertRaises(TypeError, os.setreuid, 'not an int', 0) ++ self.assertRaises(TypeError, os.setreuid, 0, 'not an int') + self.assertRaises(OverflowError, os.setreuid, 1<<32, 0) + self.assertRaises(OverflowError, os.setreuid, 0, 1<<32) + +@@ -1762,6 +1768,8 @@ class PosixUidGidTests(unittest.TestCase): + def test_setregid(self): + if os.getuid() != 0 and not HAVE_WHEEL_GROUP: + self.assertRaises(OSError, os.setregid, 0, 0) ++ self.assertRaises(TypeError, os.setregid, 'not an int', 0) ++ self.assertRaises(TypeError, os.setregid, 0, 'not an int') + self.assertRaises(OverflowError, os.setregid, 1<<32, 0) + self.assertRaises(OverflowError, os.setregid, 0, 1<<32) + +diff --git 
a/Lib/test/test_pwd.py b/Lib/test/test_pwd.py +index ac9cff7..db98159 100644 +--- a/Lib/test/test_pwd.py ++++ b/Lib/test/test_pwd.py +@@ -104,11 +104,11 @@ class PwdTest(unittest.TestCase): + # In some cases, byuids isn't a complete list of all users in the + # system, so if we try to pick a value not in byuids (via a perturbing + # loop, say), pwd.getpwuid() might still be able to find data for that +- # uid. Using sys.maxint may provoke the same problems, but hopefully ++ # uid. Using 2**32 - 2 may provoke the same problems, but hopefully + # it will be a more repeatable failure. + # Android accepts a very large span of uids including sys.maxsize and + # -1; it raises KeyError with 1 or 2 for example. +- fakeuid = sys.maxsize ++ fakeuid = 2**32 - 2 + self.assertNotIn(fakeuid, byuids) + if not support.is_android: + self.assertRaises(KeyError, pwd.getpwuid, fakeuid) diff --git a/SOURCES/00160-disable-test_fs_holes-in-rpm-build.patch b/SOURCES/00160-disable-test_fs_holes-in-rpm-build.patch new file mode 100644 index 00000000..9fa91d5e --- /dev/null +++ b/SOURCES/00160-disable-test_fs_holes-in-rpm-build.patch @@ -0,0 +1,11 @@ +diff -up cpython-59223da36dec/Lib/test/test_posix.py.disable-test_fs_holes-in-rpm-build cpython-59223da36dec/Lib/test/test_posix.py +--- cpython-59223da36dec/Lib/test/test_posix.py.disable-test_fs_holes-in-rpm-build 2012-08-07 17:15:59.000000000 -0400 ++++ cpython-59223da36dec/Lib/test/test_posix.py 2012-08-07 17:16:53.528330330 -0400 +@@ -973,6 +973,7 @@ class PosixTester(unittest.TestCase): + posix.RTLD_GLOBAL + posix.RTLD_LOCAL + ++ @unittest._skipInRpmBuild('running kernel may not match kernel in chroot') + @unittest.skipUnless(hasattr(os, 'SEEK_HOLE'), + "test needs an OS that reports file holes") + def test_fs_holes(self): diff --git a/SOURCES/00163-disable-parts-of-test_socket-in-rpm-build.patch b/SOURCES/00163-disable-parts-of-test_socket-in-rpm-build.patch new file mode 100644 index 00000000..0e280360 --- /dev/null +++ b/SOURCES/00163-disable-parts-of-test_socket-in-rpm-build.patch @@ -0,0 +1,11 @@ +diff -up Python-3.3.0b1/Lib/test/test_socket.py.disable-test_socket-in-rpm-builds Python-3.3.0b1/Lib/test/test_socket.py +--- Python-3.3.0b1/Lib/test/test_socket.py.disable-test_socket-in-rpm-builds 2012-07-24 15:02:30.823355067 -0400 ++++ Python-3.3.0b1/Lib/test/test_socket.py 2012-07-24 15:08:13.021354999 -0400 +@@ -2188,6 +2188,7 @@ class RecvmsgGenericStreamTests(RecvmsgG + # Tests which require a stream socket and can use either recvmsg() + # or recvmsg_into(). + ++ @unittest._skipInRpmBuild('fails intermittently when run within Koji') + def testRecvmsgEOF(self): + # Receive end-of-stream indicator (b"", peer socket closed). + msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock, 1024) diff --git a/SOURCES/00170-gc-assertions.patch b/SOURCES/00170-gc-assertions.patch new file mode 100644 index 00000000..f4917334 --- /dev/null +++ b/SOURCES/00170-gc-assertions.patch @@ -0,0 +1,310 @@ +diff --git a/Include/object.h b/Include/object.h +index 0c88603..e3413e8 100644 +--- a/Include/object.h ++++ b/Include/object.h +@@ -1059,6 +1059,49 @@ PyAPI_FUNC(void) + _PyObject_DebugTypeStats(FILE *out); + #endif /* ifndef Py_LIMITED_API */ + ++/* ++ Define a pair of assertion macros. ++ ++ These work like the regular C assert(), in that they will abort the ++ process with a message on stderr if the given condition fails to hold, ++ but compile away to nothing if NDEBUG is defined. 
++ ++ However, before aborting, Python will also try to call _PyObject_Dump() on ++ the given object. This may be of use when investigating bugs in which a ++ particular object is corrupt (e.g. buggy a tp_visit method in an extension ++ module breaking the garbage collector), to help locate the broken objects. ++ ++ The WITH_MSG variant allows you to supply an additional message that Python ++ will attempt to print to stderr, after the object dump. ++*/ ++#ifdef NDEBUG ++/* No debugging: compile away the assertions: */ ++#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) ((void)0) ++#else ++/* With debugging: generate checks: */ ++#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) \ ++ ((expr) \ ++ ? (void)(0) \ ++ : _PyObject_AssertFailed((obj), \ ++ (msg), \ ++ (__STRING(expr)), \ ++ (__FILE__), \ ++ (__LINE__), \ ++ (__PRETTY_FUNCTION__))) ++#endif ++ ++#define PyObject_ASSERT(obj, expr) \ ++ PyObject_ASSERT_WITH_MSG(obj, expr, NULL) ++ ++/* ++ Declare and define the entrypoint even when NDEBUG is defined, to avoid ++ causing compiler/linker errors when building extensions without NDEBUG ++ against a Python built with NDEBUG defined ++*/ ++PyAPI_FUNC(void) _PyObject_AssertFailed(PyObject *, const char *, ++ const char *, const char *, int, ++ const char *); ++ + #ifdef __cplusplus + } + #endif +diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py +index e727499..6efcafb 100644 +--- a/Lib/test/test_gc.py ++++ b/Lib/test/test_gc.py +@@ -1,10 +1,11 @@ + import unittest + from test.support import (verbose, refcount_test, run_unittest, + strip_python_stderr, cpython_only, start_threads, +- temp_dir, requires_type_collecting) ++ temp_dir, import_module, requires_type_collecting) + from test.support.script_helper import assert_python_ok, make_script + + import sys ++import sysconfig + import time + import gc + import weakref +@@ -50,6 +51,8 @@ class GC_Detector(object): + # gc collects it. + self.wr = weakref.ref(C1055820(666), it_happened) + ++BUILD_WITH_NDEBUG = ('-DNDEBUG' in sysconfig.get_config_vars()['PY_CFLAGS']) ++ + @with_tp_del + class Uncollectable(object): + """Create a reference cycle with multiple __del__ methods. 
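
The BUILD_WITH_NDEBUG guard introduced above can also be checked interactively; the following sketch (not part of the patch) reads the same PY_CFLAGS configuration variable to tell whether the interpreter was compiled with -DNDEBUG, in which case the PyObject_ASSERT checks compile away and the new test is skipped.

    import sysconfig

    cflags = sysconfig.get_config_vars().get('PY_CFLAGS', '')
    if '-DNDEBUG' in cflags:
        print('NDEBUG build: PyObject_ASSERT checks are compiled out')
    else:
        print('debug build: PyObject_ASSERT checks are active')
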
+@@ -862,6 +865,50 @@ class GCCallbackTests(unittest.TestCase): + self.assertEqual(len(gc.garbage), 0) + + ++ @unittest.skipIf(BUILD_WITH_NDEBUG, ++ 'built with -NDEBUG') ++ def test_refcount_errors(self): ++ self.preclean() ++ # Verify the "handling" of objects with broken refcounts ++ import_module("ctypes") #skip if not supported ++ ++ import subprocess ++ code = '''if 1: ++ a = [] ++ b = [a] ++ ++ # Simulate the refcount of "a" being too low (compared to the ++ # references held on it by live data), but keeping it above zero ++ # (to avoid deallocating it): ++ import ctypes ++ ctypes.pythonapi.Py_DecRef(ctypes.py_object(a)) ++ ++ # The garbage collector should now have a fatal error when it reaches ++ # the broken object: ++ import gc ++ gc.collect() ++ ''' ++ p = subprocess.Popen([sys.executable, "-c", code], ++ stdout=subprocess.PIPE, ++ stderr=subprocess.PIPE) ++ stdout, stderr = p.communicate() ++ p.stdout.close() ++ p.stderr.close() ++ # Verify that stderr has a useful error message: ++ self.assertRegex(stderr, ++ b'Modules/gcmodule.c:[0-9]+: visit_decref: Assertion "\(\(gc\)->gc.gc_refs >> \(1\)\) != 0" failed.') ++ self.assertRegex(stderr, ++ b'refcount was too small') ++ self.assertRegex(stderr, ++ b'object : \[\]') ++ self.assertRegex(stderr, ++ b'type : list') ++ self.assertRegex(stderr, ++ b'refcount: 1') ++ self.assertRegex(stderr, ++ b'address : 0x[0-9a-f]+') ++ ++ + class GCTogglingTests(unittest.TestCase): + def setUp(self): + gc.enable() +diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c +index 0c6f444..87edd5a 100644 +--- a/Modules/gcmodule.c ++++ b/Modules/gcmodule.c +@@ -341,7 +341,8 @@ update_refs(PyGC_Head *containers) + { + PyGC_Head *gc = containers->gc.gc_next; + for (; gc != containers; gc = gc->gc.gc_next) { +- assert(_PyGCHead_REFS(gc) == GC_REACHABLE); ++ PyObject_ASSERT(FROM_GC(gc), ++ _PyGCHead_REFS(gc) == GC_REACHABLE); + _PyGCHead_SET_REFS(gc, Py_REFCNT(FROM_GC(gc))); + /* Python's cyclic gc should never see an incoming refcount + * of 0: if something decref'ed to 0, it should have been +@@ -361,7 +362,8 @@ update_refs(PyGC_Head *containers) + * so serious that maybe this should be a release-build + * check instead of an assert? + */ +- assert(_PyGCHead_REFS(gc) != 0); ++ PyObject_ASSERT(FROM_GC(gc), ++ _PyGCHead_REFS(gc) != 0); + } + } + +@@ -376,7 +378,9 @@ visit_decref(PyObject *op, void *data) + * generation being collected, which can be recognized + * because only they have positive gc_refs. + */ +- assert(_PyGCHead_REFS(gc) != 0); /* else refcount was too small */ ++ PyObject_ASSERT_WITH_MSG(FROM_GC(gc), ++ _PyGCHead_REFS(gc) != 0, ++ "refcount was too small"); /* else refcount was too small */ + if (_PyGCHead_REFS(gc) > 0) + _PyGCHead_DECREF(gc); + } +@@ -436,9 +440,10 @@ visit_reachable(PyObject *op, PyGC_Head *reachable) + * If gc_refs == GC_UNTRACKED, it must be ignored. 
+ */ + else { +- assert(gc_refs > 0 +- || gc_refs == GC_REACHABLE +- || gc_refs == GC_UNTRACKED); ++ PyObject_ASSERT(FROM_GC(gc), ++ gc_refs > 0 ++ || gc_refs == GC_REACHABLE ++ || gc_refs == GC_UNTRACKED); + } + } + return 0; +@@ -480,7 +485,7 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable) + */ + PyObject *op = FROM_GC(gc); + traverseproc traverse = Py_TYPE(op)->tp_traverse; +- assert(_PyGCHead_REFS(gc) > 0); ++ PyObject_ASSERT(op, _PyGCHead_REFS(gc) > 0); + _PyGCHead_SET_REFS(gc, GC_REACHABLE); + (void) traverse(op, + (visitproc)visit_reachable, +@@ -543,7 +548,7 @@ move_legacy_finalizers(PyGC_Head *unreachable, PyGC_Head *finalizers) + for (gc = unreachable->gc.gc_next; gc != unreachable; gc = next) { + PyObject *op = FROM_GC(gc); + +- assert(IS_TENTATIVELY_UNREACHABLE(op)); ++ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op)); + next = gc->gc.gc_next; + + if (has_legacy_finalizer(op)) { +@@ -619,7 +624,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) + PyWeakReference **wrlist; + + op = FROM_GC(gc); +- assert(IS_TENTATIVELY_UNREACHABLE(op)); ++ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op)); + next = gc->gc.gc_next; + + if (! PyType_SUPPORTS_WEAKREFS(Py_TYPE(op))) +@@ -640,9 +645,9 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) + * the callback pointer intact. Obscure: it also + * changes *wrlist. + */ +- assert(wr->wr_object == op); ++ PyObject_ASSERT(wr->wr_object, wr->wr_object == op); + _PyWeakref_ClearRef(wr); +- assert(wr->wr_object == Py_None); ++ PyObject_ASSERT(wr->wr_object, wr->wr_object == Py_None); + if (wr->wr_callback == NULL) + continue; /* no callback */ + +@@ -676,7 +681,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) + */ + if (IS_TENTATIVELY_UNREACHABLE(wr)) + continue; +- assert(IS_REACHABLE(wr)); ++ PyObject_ASSERT(op, IS_REACHABLE(wr)); + + /* Create a new reference so that wr can't go away + * before we can process it again. +@@ -685,7 +690,8 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) + + /* Move wr to wrcb_to_call, for the next pass. 
*/ + wrasgc = AS_GC(wr); +- assert(wrasgc != next); /* wrasgc is reachable, but ++ PyObject_ASSERT(op, wrasgc != next); ++ /* wrasgc is reachable, but + next isn't, so they can't + be the same */ + gc_list_move(wrasgc, &wrcb_to_call); +@@ -701,11 +707,11 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) + + gc = wrcb_to_call.gc.gc_next; + op = FROM_GC(gc); +- assert(IS_REACHABLE(op)); +- assert(PyWeakref_Check(op)); ++ PyObject_ASSERT(op, IS_REACHABLE(op)); ++ PyObject_ASSERT(op, PyWeakref_Check(op)); + wr = (PyWeakReference *)op; + callback = wr->wr_callback; +- assert(callback != NULL); ++ PyObject_ASSERT(op, callback != NULL); + + /* copy-paste of weakrefobject.c's handle_callback() */ + temp = PyObject_CallFunctionObjArgs(callback, wr, NULL); +@@ -822,12 +828,14 @@ check_garbage(PyGC_Head *collectable) + for (gc = collectable->gc.gc_next; gc != collectable; + gc = gc->gc.gc_next) { + _PyGCHead_SET_REFS(gc, Py_REFCNT(FROM_GC(gc))); +- assert(_PyGCHead_REFS(gc) != 0); ++ PyObject_ASSERT(FROM_GC(gc), ++ _PyGCHead_REFS(gc) != 0); + } + subtract_refs(collectable); + for (gc = collectable->gc.gc_next; gc != collectable; + gc = gc->gc.gc_next) { +- assert(_PyGCHead_REFS(gc) >= 0); ++ PyObject_ASSERT(FROM_GC(gc), ++ _PyGCHead_REFS(gc) >= 0); + if (_PyGCHead_REFS(gc) != 0) + return -1; + } +diff --git a/Objects/object.c b/Objects/object.c +index 559794f..a47d47f 100644 +--- a/Objects/object.c ++++ b/Objects/object.c +@@ -2022,6 +2022,35 @@ _PyTrash_thread_destroy_chain(void) + } + } + ++PyAPI_FUNC(void) ++_PyObject_AssertFailed(PyObject *obj, const char *msg, const char *expr, ++ const char *file, int line, const char *function) ++{ ++ fprintf(stderr, ++ "%s:%d: %s: Assertion \"%s\" failed.\n", ++ file, line, function, expr); ++ if (msg) { ++ fprintf(stderr, "%s\n", msg); ++ } ++ ++ fflush(stderr); ++ ++ if (obj) { ++ /* This might succeed or fail, but we're about to abort, so at least ++ try to provide any extra info we can: */ ++ _PyObject_Dump(obj); ++ } ++ else { ++ fprintf(stderr, "NULL object\n"); ++ } ++ ++ fflush(stdout); ++ fflush(stderr); ++ ++ /* Terminate the process: */ ++ abort(); ++} ++ + #ifndef Py_TRACE_REFS + /* For Py_LIMITED_API, we need an out-of-line version of _Py_Dealloc. + Define this here, so we can undefine the macro. 
*/ diff --git a/SOURCES/00178-dont-duplicate-flags-in-sysconfig.patch b/SOURCES/00178-dont-duplicate-flags-in-sysconfig.patch new file mode 100644 index 00000000..fc49b300 --- /dev/null +++ b/SOURCES/00178-dont-duplicate-flags-in-sysconfig.patch @@ -0,0 +1,30 @@ +diff -r 39b9b05c3085 Lib/distutils/sysconfig.py +--- a/Lib/distutils/sysconfig.py Wed Apr 10 00:27:23 2013 +0200 ++++ b/Lib/distutils/sysconfig.py Wed Apr 10 10:14:18 2013 +0200 +@@ -362,7 +362,10 @@ + done[n] = item = "" + if found: + after = value[m.end():] +- value = value[:m.start()] + item + after ++ value = value[:m.start()] ++ if item.strip() not in value: ++ value += item ++ value += after + if "$" in after: + notdone[name] = value + else: +diff -r 39b9b05c3085 Lib/sysconfig.py +--- a/Lib/sysconfig.py Wed Apr 10 00:27:23 2013 +0200 ++++ b/Lib/sysconfig.py Wed Apr 10 10:14:18 2013 +0200 +@@ -296,7 +296,10 @@ + + if found: + after = value[m.end():] +- value = value[:m.start()] + item + after ++ value = value[:m.start()] ++ if item.strip() not in value: ++ value += item ++ value += after + if "$" in after: + notdone[name] = value + else: diff --git a/SOURCES/00180-python-add-support-for-ppc64p7.patch b/SOURCES/00180-python-add-support-for-ppc64p7.patch new file mode 100644 index 00000000..054f9f39 --- /dev/null +++ b/SOURCES/00180-python-add-support-for-ppc64p7.patch @@ -0,0 +1,13 @@ +diff --git a/config.sub b/config.sub +index 40ea5df..932128b 100755 +--- a/config.sub ++++ b/config.sub +@@ -1045,7 +1045,7 @@ case $basic_machine in + ;; + ppc64) basic_machine=powerpc64-unknown + ;; +- ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` ++ ppc64-* | ppc64p7-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` + ;; + ppc64le | powerpc64little) + basic_machine=powerpc64le-unknown diff --git a/SOURCES/00186-dont-raise-from-py_compile.patch b/SOURCES/00186-dont-raise-from-py_compile.patch new file mode 100644 index 00000000..25c4a9de --- /dev/null +++ b/SOURCES/00186-dont-raise-from-py_compile.patch @@ -0,0 +1,14 @@ +diff -r 7fa3e824a4ee Lib/test/test_py_compile.py +--- a/Lib/test/test_py_compile.py Tue Oct 29 22:25:06 2013 -0400 ++++ b/Lib/test/test_py_compile.py Wed Oct 30 11:08:31 2013 +0100 +@@ -54,6 +54,10 @@ + self.assertTrue(os.path.exists(self.pyc_path)) + self.assertFalse(os.path.exists(self.cache_path)) + ++ def test_bad_coding(self): ++ bad_coding = os.path.join(os.path.dirname(__file__), 'bad_coding2.py') ++ self.assertIsNone(py_compile.compile(bad_coding, doraise=False)) ++ + def test_relative_path(self): + py_compile.compile(os.path.relpath(self.source_path), + os.path.relpath(self.pyc_path)) diff --git a/SOURCES/00188-fix-lib2to3-tests-when-hashlib-doesnt-compile-properly.patch b/SOURCES/00188-fix-lib2to3-tests-when-hashlib-doesnt-compile-properly.patch new file mode 100644 index 00000000..90af30db --- /dev/null +++ b/SOURCES/00188-fix-lib2to3-tests-when-hashlib-doesnt-compile-properly.patch @@ -0,0 +1,11 @@ +diff -r 28c04e954bb6 Lib/lib2to3/main.py +--- a/Lib/lib2to3/main.py Tue Oct 29 22:25:55 2013 -0400 ++++ b/Lib/lib2to3/main.py Wed Nov 06 14:33:07 2013 +0100 +@@ -213,6 +213,7 @@ + + # Set up logging handler + level = logging.DEBUG if options.verbose else logging.INFO ++ logging.root.handlers = [] + logging.basicConfig(format='%(name)s: %(message)s', level=level) + logger = logging.getLogger('lib2to3.main') + diff --git a/SOURCES/00189-add-rewheel-module.patch b/SOURCES/00189-add-rewheel-module.patch new file mode 100644 index 00000000..36b26112 --- /dev/null +++ 
b/SOURCES/00189-add-rewheel-module.patch @@ -0,0 +1,233 @@ +diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py +index d69e09f..5cb12df 100644 +--- a/Lib/ensurepip/__init__.py ++++ b/Lib/ensurepip/__init__.py +@@ -1,8 +1,10 @@ + import os + import os.path + import pkgutil ++import shutil + import sys + import tempfile ++from ensurepip import rewheel + + + __all__ = ["version", "bootstrap"] +@@ -25,6 +27,8 @@ def _run_pip(args, additional_paths=None): + + # Install the bundled software + import pip ++ if args[0] in ["install", "list", "wheel"]: ++ args.append('--pre') + return pip.main(args) + + +@@ -88,20 +92,39 @@ def _bootstrap(*, root=None, upgrade=False, user=False, + # omit pip and easy_install + os.environ["ENSUREPIP_OPTIONS"] = "install" + ++ whls = [] ++ rewheel_dir = None ++ # try to see if we have system-wide versions of _PROJECTS ++ dep_records = rewheel.find_system_records([p[0] for p in _PROJECTS]) ++ # TODO: check if system-wide versions are the newest ones ++ # if --upgrade is used? ++ if all(dep_records): ++ # if we have all _PROJECTS installed system-wide, we'll recreate ++ # wheels from them and install those ++ rewheel_dir = tempfile.TemporaryDirectory() ++ for dr in dep_records: ++ new_whl = rewheel.rewheel_from_record(dr, rewheel_dir.name) ++ whls.append(os.path.join(rewheel_dir.name, new_whl)) ++ else: ++ # if we don't have all the _PROJECTS installed system-wide, ++ # let's just fall back to bundled wheels ++ for project, version in _PROJECTS: ++ whl = os.path.join( ++ os.path.dirname(__file__), ++ "_bundled", ++ "{}-{}-py2.py3-none-any.whl".format(project, version) ++ ) ++ whls.append(whl) ++ + with tempfile.TemporaryDirectory() as tmpdir: + # Put our bundled wheels into a temporary directory and construct the + # additional paths that need added to sys.path + additional_paths = [] +- for project, version in _PROJECTS: +- wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version) +- whl = pkgutil.get_data( +- "ensurepip", +- "_bundled/{}".format(wheel_name), +- ) +- with open(os.path.join(tmpdir, wheel_name), "wb") as fp: +- fp.write(whl) +- +- additional_paths.append(os.path.join(tmpdir, wheel_name)) ++ for whl in whls: ++ shutil.copy(whl, tmpdir) ++ additional_paths.append(os.path.join(tmpdir, os.path.basename(whl))) ++ if rewheel_dir: ++ rewheel_dir.cleanup() + + # Construct the arguments to be passed to the pip command + args = ["install", "--no-index", "--find-links", tmpdir] +diff -Nur Python-3.4.1/Lib/ensurepip/rewheel/__init__.py Python-3.4.1-rewheel/Lib/ensurepip/rewheel/__init__.py +--- Python-3.4.1/Lib/ensurepip/rewheel/__init__.py 1970-01-01 01:00:00.000000000 +0100 ++++ Python-3.4.1-rewheel/Lib/ensurepip/rewheel/__init__.py 2014-08-21 10:11:22.560320121 +0200 +@@ -0,0 +1,143 @@ ++import argparse ++import codecs ++import csv ++import email.parser ++import os ++import io ++import re ++import site ++import subprocess ++import sys ++import zipfile ++ ++def run(): ++ parser = argparse.ArgumentParser(description='Recreate wheel of package with given RECORD.') ++ parser.add_argument('record_path', ++ help='Path to RECORD file') ++ parser.add_argument('-o', '--output-dir', ++ help='Dir where to place the wheel, defaults to current working dir.', ++ dest='outdir', ++ default=os.path.curdir) ++ ++ ns = parser.parse_args() ++ retcode = 0 ++ try: ++ print(rewheel_from_record(**vars(ns))) ++ except BaseException as e: ++ print('Failed: {}'.format(e)) ++ retcode = 1 ++ sys.exit(1) ++ ++def find_system_records(projects): ++ """Return list of 
paths to RECORD files for system-installed projects. ++ ++ If a project is not installed, the resulting list contains None instead ++ of a path to its RECORD ++ """ ++ records = [] ++ # get system site-packages dirs ++ sys_sitepack = site.getsitepackages([sys.base_prefix, sys.base_exec_prefix]) ++ sys_sitepack = [sp for sp in sys_sitepack if os.path.exists(sp)] ++ # try to find all projects in all system site-packages ++ for project in projects: ++ path = None ++ for sp in sys_sitepack: ++ dist_info_re = os.path.join(sp, project) + r'-[^\{0}]+\.dist-info'.format(os.sep) ++ candidates = [os.path.join(sp, p) for p in os.listdir(sp)] ++ # filter out candidate dirs based on the above regexp ++ filtered = [c for c in candidates if re.match(dist_info_re, c)] ++ # if we have 0 or 2 or more dirs, something is wrong... ++ if len(filtered) == 1: ++ path = filtered[0] ++ if path is not None: ++ records.append(os.path.join(path, 'RECORD')) ++ else: ++ records.append(None) ++ return records ++ ++def rewheel_from_record(record_path, outdir): ++ """Recreates a whee of package with given record_path and returns path ++ to the newly created wheel.""" ++ site_dir = os.path.dirname(os.path.dirname(record_path)) ++ record_relpath = record_path[len(site_dir):].strip(os.path.sep) ++ to_write, to_omit = get_records_to_pack(site_dir, record_relpath) ++ new_wheel_name = get_wheel_name(record_path) ++ new_wheel_path = os.path.join(outdir, new_wheel_name + '.whl') ++ ++ new_wheel = zipfile.ZipFile(new_wheel_path, mode='w', compression=zipfile.ZIP_DEFLATED) ++ # we need to write a new record with just the files that we will write, ++ # e.g. not binaries and *.pyc/*.pyo files ++ new_record = io.StringIO() ++ writer = csv.writer(new_record) ++ ++ # handle files that we can write straight away ++ for f, sha_hash, size in to_write: ++ new_wheel.write(os.path.join(site_dir, f), arcname=f) ++ writer.writerow([f, sha_hash,size]) ++ ++ # rewrite the old wheel file with a new computed one ++ writer.writerow([record_relpath, '', '']) ++ new_wheel.writestr(record_relpath, new_record.getvalue()) ++ ++ new_wheel.close() ++ ++ return new_wheel.filename ++ ++def get_wheel_name(record_path): ++ """Return proper name of the wheel, without .whl.""" ++ ++ wheel_info_path = os.path.join(os.path.dirname(record_path), 'WHEEL') ++ with codecs.open(wheel_info_path, encoding='utf-8') as wheel_info_file: ++ wheel_info = email.parser.Parser().parsestr(wheel_info_file.read()) ++ ++ metadata_path = os.path.join(os.path.dirname(record_path), 'METADATA') ++ with codecs.open(metadata_path, encoding='utf-8') as metadata_file: ++ metadata = email.parser.Parser().parsestr(metadata_file.read()) ++ ++ # construct name parts according to wheel spec ++ distribution = metadata.get('Name') ++ version = metadata.get('Version') ++ build_tag = '' # nothing for now ++ lang_tag = [] ++ for t in wheel_info.get_all('Tag'): ++ lang_tag.append(t.split('-')[0]) ++ lang_tag = '.'.join(lang_tag) ++ abi_tag, plat_tag = wheel_info.get('Tag').split('-')[1:3] ++ # leave out build tag, if it is empty ++ to_join = filter(None, [distribution, version, build_tag, lang_tag, abi_tag, plat_tag]) ++ return '-'.join(list(to_join)) ++ ++def get_records_to_pack(site_dir, record_relpath): ++ """Accepts path of sitedir and path of RECORD file relative to it. 
++ Returns two lists: ++ - list of files that can be written to new RECORD straight away ++ - list of files that shouldn't be written or need some processing ++ (pyc and pyo files, scripts) ++ """ ++ record_file_path = os.path.join(site_dir, record_relpath) ++ with codecs.open(record_file_path, encoding='utf-8') as record_file: ++ record_contents = record_file.read() ++ # temporary fix for https://github.com/pypa/pip/issues/1376 ++ # we need to ignore files under ".data" directory ++ data_dir = os.path.dirname(record_relpath).strip(os.path.sep) ++ data_dir = data_dir[:-len('dist-info')] + 'data' ++ ++ to_write = [] ++ to_omit = [] ++ for l in record_contents.splitlines(): ++ spl = l.split(',') ++ if len(spl) == 3: ++ # new record will omit (or write differently): ++ # - abs paths, paths with ".." (entry points), ++ # - pyc+pyo files ++ # - the old RECORD file ++ # TODO: is there any better way to recognize an entry point? ++ if os.path.isabs(spl[0]) or spl[0].startswith('..') or \ ++ spl[0].endswith('.pyc') or spl[0].endswith('.pyo') or \ ++ spl[0] == record_relpath or spl[0].startswith(data_dir): ++ to_omit.append(spl) ++ else: ++ to_write.append(spl) ++ else: ++ pass # bad RECORD or empty line ++ return to_write, to_omit +diff -Nur Python-3.4.1/Makefile.pre.in Python-3.4.1-rewheel/Makefile.pre.in +--- Python-3.4.1/Makefile.pre.in 2014-08-21 10:49:31.512695040 +0200 ++++ Python-3.4.1-rewheel/Makefile.pre.in 2014-08-21 10:10:41.961341722 +0200 +@@ -1145,7 +1145,7 @@ + test/test_asyncio \ + collections concurrent concurrent/futures encodings \ + email email/mime test/test_email test/test_email/data \ +- ensurepip ensurepip/_bundled \ ++ ensurepip ensurepip/_bundled ensurepip/rewheel \ + html json test/test_json http dbm xmlrpc \ + sqlite3 sqlite3/test \ + logging csv wsgiref urllib \ diff --git a/SOURCES/00205-make-libpl-respect-lib64.patch b/SOURCES/00205-make-libpl-respect-lib64.patch new file mode 100644 index 00000000..3e7c7974 --- /dev/null +++ b/SOURCES/00205-make-libpl-respect-lib64.patch @@ -0,0 +1,12 @@ +diff -up Python-3.5.0/Makefile.pre.in.lib Python-3.5.0/Makefile.pre.in +--- Python-3.5.0/Makefile.pre.in.lib 2015-09-21 15:39:47.928286620 +0200 ++++ Python-3.5.0/Makefile.pre.in 2015-09-21 15:42:58.004042762 +0200 +@@ -1340,7 +1340,7 @@ inclinstall: + + # Install the library and miscellaneous stuff needed for extending/embedding + # This goes into $(exec_prefix) +-LIBPL= @LIBPL@ ++LIBPL= $(LIBDEST)/config-$(LDVERSION)-$(MULTIARCH) + + # pkgconfig directory + LIBPC= $(LIBDIR)/pkgconfig diff --git a/SOURCES/00206-remove-hf-from-arm-triplet.patch b/SOURCES/00206-remove-hf-from-arm-triplet.patch new file mode 100644 index 00000000..c5f309c9 --- /dev/null +++ b/SOURCES/00206-remove-hf-from-arm-triplet.patch @@ -0,0 +1,15 @@ +diff -up Python-3.5.0/configure.ac.eabi Python-3.5.0/configure.ac +--- Python-3.5.0/configure.eabi 2015-09-23 13:52:20.756909744 +0200 ++++ Python-3.5.0/configure 2015-09-23 13:52:46.859163629 +0200 +@@ -762,9 +762,9 @@ cat >> conftest.c <> conftest.c <> conftest.c <> conftest.c <> conftest.c < maxlen: ++ out = b'(... truncated stdout ...)' + out[-maxlen:] ++ if len(err) > maxlen: ++ err = b'(... 
truncated stderr ...)' + err[-maxlen:] ++ out = out.decode('ascii', 'replace').rstrip() ++ err = err.decode('ascii', 'replace').rstrip() ++ raise AssertionError("Process return code is %d\n" ++ "command line: %r\n" ++ "\n" ++ "stdout:\n" ++ "---\n" ++ "%s\n" ++ "---\n" ++ "\n" ++ "stderr:\n" ++ "---\n" ++ "%s\n" ++ "---" ++ % (self.rc, cmd_line, ++ out, ++ err)) + + + # Executing the interpreter in a subprocess +@@ -110,30 +137,7 @@ def run_python_until_end(*args, **env_vars): + def _assert_python(expected_success, *args, **env_vars): + res, cmd_line = run_python_until_end(*args, **env_vars) + if (res.rc and expected_success) or (not res.rc and not expected_success): +- # Limit to 80 lines to ASCII characters +- maxlen = 80 * 100 +- out, err = res.out, res.err +- if len(out) > maxlen: +- out = b'(... truncated stdout ...)' + out[-maxlen:] +- if len(err) > maxlen: +- err = b'(... truncated stderr ...)' + err[-maxlen:] +- out = out.decode('ascii', 'replace').rstrip() +- err = err.decode('ascii', 'replace').rstrip() +- raise AssertionError("Process return code is %d\n" +- "command line: %r\n" +- "\n" +- "stdout:\n" +- "---\n" +- "%s\n" +- "---\n" +- "\n" +- "stderr:\n" +- "---\n" +- "%s\n" +- "---" +- % (res.rc, cmd_line, +- out, +- err)) ++ res.fail(cmd_line) + return res + + def assert_python_ok(*args, **env_vars): +diff --git a/Lib/test/test_c_locale_coercion.py b/Lib/test/test_c_locale_coercion.py +new file mode 100644 +index 0000000..635c98f +--- /dev/null ++++ b/Lib/test/test_c_locale_coercion.py +@@ -0,0 +1,371 @@ ++# Tests the attempted automatic coercion of the C locale to a UTF-8 locale ++ ++import unittest ++import locale ++import os ++import sys ++import sysconfig ++import shutil ++import subprocess ++from collections import namedtuple ++ ++import test.support ++from test.support.script_helper import ( ++ run_python_until_end, ++ interpreter_requires_environment, ++) ++ ++# Set our expectation for the default encoding used in the C locale ++# for the filesystem encoding and the standard streams ++ ++# AIX uses iso8859-1 in the C locale, other *nix platforms use ASCII ++if sys.platform.startswith("aix"): ++ C_LOCALE_STREAM_ENCODING = "iso8859-1" ++else: ++ C_LOCALE_STREAM_ENCODING = "ascii" ++ ++# FS encoding is UTF-8 on macOS, other *nix platforms use the locale encoding ++if sys.platform == "darwin": ++ C_LOCALE_FS_ENCODING = "utf-8" ++else: ++ C_LOCALE_FS_ENCODING = C_LOCALE_STREAM_ENCODING ++ ++# Note that the above is probably still wrong in some cases, such as: ++# * Windows when PYTHONLEGACYWINDOWSFSENCODING is set ++# * AIX and any other platforms that use latin-1 in the C locale ++# ++# Options for dealing with this: ++# * Don't set PYTHON_COERCE_C_LOCALE on such platforms (e.g. Windows doesn't) ++# * Fix the test expectations to match the actual platform behaviour ++ ++# In order to get the warning messages to match up as expected, the candidate ++# order here must much the target locale order in Python/pylifecycle.c ++_C_UTF8_LOCALES = ("C.UTF-8", "C.utf8", "UTF-8") ++ ++# There's no reliable cross-platform way of checking locale alias ++# lists, so the only way of knowing which of these locales will work ++# is to try them with locale.setlocale(). We do that in a subprocess ++# to avoid altering the locale of the test runner. 
++# ++# If the relevant locale module attributes exist, and we're not on a platform ++# where we expect it to always succeed, we also check that ++# `locale.nl_langinfo(locale.CODESET)` works, as if it fails, the interpreter ++# will skip locale coercion for that particular target locale ++_check_nl_langinfo_CODESET = bool( ++ sys.platform not in ("darwin", "linux") and ++ hasattr(locale, "nl_langinfo") and ++ hasattr(locale, "CODESET") ++) ++ ++def _set_locale_in_subprocess(locale_name): ++ cmd_fmt = "import locale; print(locale.setlocale(locale.LC_CTYPE, '{}'))" ++ if _check_nl_langinfo_CODESET: ++ # If there's no valid CODESET, we expect coercion to be skipped ++ cmd_fmt += "; import sys; sys.exit(not locale.nl_langinfo(locale.CODESET))" ++ cmd = cmd_fmt.format(locale_name) ++ result, py_cmd = run_python_until_end("-c", cmd, __isolated=True) ++ return result.rc == 0 ++ ++ ++ ++_fields = "fsencoding stdin_info stdout_info stderr_info lang lc_ctype lc_all" ++_EncodingDetails = namedtuple("EncodingDetails", _fields) ++ ++class EncodingDetails(_EncodingDetails): ++ # XXX (ncoghlan): Using JSON for child state reporting may be less fragile ++ CHILD_PROCESS_SCRIPT = ";".join([ ++ "import sys, os", ++ "print(sys.getfilesystemencoding())", ++ "print(sys.stdin.encoding + ':' + sys.stdin.errors)", ++ "print(sys.stdout.encoding + ':' + sys.stdout.errors)", ++ "print(sys.stderr.encoding + ':' + sys.stderr.errors)", ++ "print(os.environ.get('LANG', 'not set'))", ++ "print(os.environ.get('LC_CTYPE', 'not set'))", ++ "print(os.environ.get('LC_ALL', 'not set'))", ++ ]) ++ ++ @classmethod ++ def get_expected_details(cls, coercion_expected, fs_encoding, stream_encoding, env_vars): ++ """Returns expected child process details for a given encoding""" ++ _stream = stream_encoding + ":{}" ++ # stdin and stdout should use surrogateescape either because the ++ # coercion triggered, or because the C locale was detected ++ stream_info = 2*[_stream.format("surrogateescape")] ++ # stderr should always use backslashreplace ++ stream_info.append(_stream.format("backslashreplace")) ++ expected_lang = env_vars.get("LANG", "not set").lower() ++ if coercion_expected: ++ expected_lc_ctype = CLI_COERCION_TARGET.lower() ++ else: ++ expected_lc_ctype = env_vars.get("LC_CTYPE", "not set").lower() ++ expected_lc_all = env_vars.get("LC_ALL", "not set").lower() ++ env_info = expected_lang, expected_lc_ctype, expected_lc_all ++ return dict(cls(fs_encoding, *stream_info, *env_info)._asdict()) ++ ++ @staticmethod ++ def _handle_output_variations(data): ++ """Adjust the output to handle platform specific idiosyncrasies ++ ++ * Some platforms report ASCII as ANSI_X3.4-1968 ++ * Some platforms report ASCII as US-ASCII ++ * Some platforms report UTF-8 instead of utf-8 ++ """ ++ data = data.replace(b"ANSI_X3.4-1968", b"ascii") ++ data = data.replace(b"US-ASCII", b"ascii") ++ data = data.lower() ++ return data ++ ++ @classmethod ++ def get_child_details(cls, env_vars): ++ """Retrieves fsencoding and standard stream details from a child process ++ ++ Returns (encoding_details, stderr_lines): ++ ++ - encoding_details: EncodingDetails for eager decoding ++ - stderr_lines: result of calling splitlines() on the stderr output ++ ++ The child is run in isolated mode if the current interpreter supports ++ that. 
++ """ ++ result, py_cmd = run_python_until_end( ++ "-c", cls.CHILD_PROCESS_SCRIPT, ++ __isolated=True, ++ **env_vars ++ ) ++ if not result.rc == 0: ++ result.fail(py_cmd) ++ # All subprocess outputs in this test case should be pure ASCII ++ adjusted_output = cls._handle_output_variations(result.out) ++ stdout_lines = adjusted_output.decode("ascii").splitlines() ++ child_encoding_details = dict(cls(*stdout_lines)._asdict()) ++ stderr_lines = result.err.decode("ascii").rstrip().splitlines() ++ return child_encoding_details, stderr_lines ++ ++ ++# Details of the shared library warning emitted at runtime ++LEGACY_LOCALE_WARNING = ( ++ "Python runtime initialized with LC_CTYPE=C (a locale with default ASCII " ++ "encoding), which may cause Unicode compatibility problems. Using C.UTF-8, " ++ "C.utf8, or UTF-8 (if available) as alternative Unicode-compatible " ++ "locales is recommended." ++) ++ ++# Details of the CLI locale coercion warning emitted at runtime ++CLI_COERCION_WARNING_FMT = ( ++ "Python detected LC_CTYPE=C: LC_CTYPE coerced to {} (set another locale " ++ "or PYTHONCOERCECLOCALE=0 to disable this locale coercion behavior)." ++) ++ ++ ++AVAILABLE_TARGETS = None ++CLI_COERCION_TARGET = None ++CLI_COERCION_WARNING = None ++ ++def setUpModule(): ++ global AVAILABLE_TARGETS ++ global CLI_COERCION_TARGET ++ global CLI_COERCION_WARNING ++ ++ if AVAILABLE_TARGETS is not None: ++ # initialization already done ++ return ++ AVAILABLE_TARGETS = [] ++ ++ # Find the target locales available in the current system ++ for target_locale in _C_UTF8_LOCALES: ++ if _set_locale_in_subprocess(target_locale): ++ AVAILABLE_TARGETS.append(target_locale) ++ ++ if AVAILABLE_TARGETS: ++ # Coercion is expected to use the first available target locale ++ CLI_COERCION_TARGET = AVAILABLE_TARGETS[0] ++ CLI_COERCION_WARNING = CLI_COERCION_WARNING_FMT.format(CLI_COERCION_TARGET) ++ ++ ++class _LocaleHandlingTestCase(unittest.TestCase): ++ # Base class to check expected locale handling behaviour ++ ++ def _check_child_encoding_details(self, ++ env_vars, ++ expected_fs_encoding, ++ expected_stream_encoding, ++ expected_warnings, ++ coercion_expected): ++ """Check the C locale handling for the given process environment ++ ++ Parameters: ++ expected_fs_encoding: expected sys.getfilesystemencoding() result ++ expected_stream_encoding: expected encoding for standard streams ++ expected_warning: stderr output to expect (if any) ++ """ ++ result = EncodingDetails.get_child_details(env_vars) ++ encoding_details, stderr_lines = result ++ expected_details = EncodingDetails.get_expected_details( ++ coercion_expected, ++ expected_fs_encoding, ++ expected_stream_encoding, ++ env_vars ++ ) ++ self.assertEqual(encoding_details, expected_details) ++ if expected_warnings is None: ++ expected_warnings = [] ++ self.assertEqual(stderr_lines, expected_warnings) ++ ++ ++class LocaleConfigurationTests(_LocaleHandlingTestCase): ++ # Test explicit external configuration via the process environment ++ ++ def setUpClass(): ++ # This relies on setupModule() having been run, so it can't be ++ # handled via the @unittest.skipUnless decorator ++ if not AVAILABLE_TARGETS: ++ raise unittest.SkipTest("No C-with-UTF-8 locale available") ++ ++ def test_external_target_locale_configuration(self): ++ ++ # Explicitly setting a target locale should give the same behaviour as ++ # is seen when implicitly coercing to that target locale ++ self.maxDiff = None ++ ++ expected_fs_encoding = "utf-8" ++ expected_stream_encoding = "utf-8" ++ ++ base_var_dict = { ++ 
"LANG": "", ++ "LC_CTYPE": "", ++ "LC_ALL": "", ++ } ++ for env_var in ("LANG", "LC_CTYPE"): ++ for locale_to_set in AVAILABLE_TARGETS: ++ # XXX (ncoghlan): LANG=UTF-8 doesn't appear to work as ++ # expected, so skip that combination for now ++ # See https://bugs.python.org/issue30672 for discussion ++ if env_var == "LANG" and locale_to_set == "UTF-8": ++ continue ++ ++ with self.subTest(env_var=env_var, ++ configured_locale=locale_to_set): ++ var_dict = base_var_dict.copy() ++ var_dict[env_var] = locale_to_set ++ self._check_child_encoding_details(var_dict, ++ expected_fs_encoding, ++ expected_stream_encoding, ++ expected_warnings=None, ++ coercion_expected=False) ++ ++ ++ ++@test.support.cpython_only ++@unittest.skipUnless(sysconfig.get_config_var("PY_COERCE_C_LOCALE"), ++ "C locale coercion disabled at build time") ++class LocaleCoercionTests(_LocaleHandlingTestCase): ++ # Test implicit reconfiguration of the environment during CLI startup ++ ++ def _check_c_locale_coercion(self, ++ fs_encoding, stream_encoding, ++ coerce_c_locale, ++ expected_warnings=None, ++ coercion_expected=True, ++ **extra_vars): ++ """Check the C locale handling for various configurations ++ ++ Parameters: ++ fs_encoding: expected sys.getfilesystemencoding() result ++ stream_encoding: expected encoding for standard streams ++ coerce_c_locale: setting to use for PYTHONCOERCECLOCALE ++ None: don't set the variable at all ++ str: the value set in the child's environment ++ expected_warnings: expected warning lines on stderr ++ extra_vars: additional environment variables to set in subprocess ++ """ ++ self.maxDiff = None ++ ++ if not AVAILABLE_TARGETS: ++ # Locale coercion is disabled when there aren't any target locales ++ fs_encoding = C_LOCALE_FS_ENCODING ++ stream_encoding = C_LOCALE_STREAM_ENCODING ++ coercion_expected = False ++ if expected_warnings: ++ expected_warnings = [LEGACY_LOCALE_WARNING] ++ ++ base_var_dict = { ++ "LANG": "", ++ "LC_CTYPE": "", ++ "LC_ALL": "", ++ } ++ base_var_dict.update(extra_vars) ++ for env_var in ("LANG", "LC_CTYPE"): ++ for locale_to_set in ("", "C", "POSIX", "invalid.ascii"): ++ # XXX (ncoghlan): *BSD platforms don't behave as expected in the ++ # POSIX locale, so we skip that for now ++ # See https://bugs.python.org/issue30672 for discussion ++ if locale_to_set == "POSIX": ++ continue ++ with self.subTest(env_var=env_var, ++ nominal_locale=locale_to_set, ++ PYTHONCOERCECLOCALE=coerce_c_locale): ++ var_dict = base_var_dict.copy() ++ var_dict[env_var] = locale_to_set ++ if coerce_c_locale is not None: ++ var_dict["PYTHONCOERCECLOCALE"] = coerce_c_locale ++ # Check behaviour on successful coercion ++ self._check_child_encoding_details(var_dict, ++ fs_encoding, ++ stream_encoding, ++ expected_warnings, ++ coercion_expected) ++ ++ def test_test_PYTHONCOERCECLOCALE_not_set(self): ++ # This should coerce to the first available target locale by default ++ self._check_c_locale_coercion("utf-8", "utf-8", coerce_c_locale=None) ++ ++ def test_PYTHONCOERCECLOCALE_not_zero(self): ++ # *Any* string other than "0" is considered "set" for our purposes ++ # and hence should result in the locale coercion being enabled ++ for setting in ("", "1", "true", "false"): ++ self._check_c_locale_coercion("utf-8", "utf-8", coerce_c_locale=setting) ++ ++ def test_PYTHONCOERCECLOCALE_set_to_warn(self): ++ # PYTHONCOERCECLOCALE=warn enables runtime warnings for legacy locales ++ self._check_c_locale_coercion("utf-8", "utf-8", ++ coerce_c_locale="warn", ++ expected_warnings=[CLI_COERCION_WARNING]) ++ ++ ++ 
def test_PYTHONCOERCECLOCALE_set_to_zero(self): ++ # The setting "0" should result in the locale coercion being disabled ++ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING, ++ C_LOCALE_STREAM_ENCODING, ++ coerce_c_locale="0", ++ coercion_expected=False) ++ # Setting LC_ALL=C shouldn't make any difference to the behaviour ++ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING, ++ C_LOCALE_STREAM_ENCODING, ++ coerce_c_locale="0", ++ LC_ALL="C", ++ coercion_expected=False) ++ ++ def test_LC_ALL_set_to_C(self): ++ # Setting LC_ALL should render the locale coercion ineffective ++ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING, ++ C_LOCALE_STREAM_ENCODING, ++ coerce_c_locale=None, ++ LC_ALL="C", ++ coercion_expected=False) ++ # And result in a warning about a lack of locale compatibility ++ self._check_c_locale_coercion(C_LOCALE_FS_ENCODING, ++ C_LOCALE_STREAM_ENCODING, ++ coerce_c_locale="warn", ++ LC_ALL="C", ++ expected_warnings=[LEGACY_LOCALE_WARNING], ++ coercion_expected=False) ++ ++def test_main(): ++ test.support.run_unittest( ++ LocaleConfigurationTests, ++ LocaleCoercionTests ++ ) ++ test.support.reap_children() ++ ++if __name__ == "__main__": ++ test_main() +diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py +index 6e4286e..594dfa9 100644 +--- a/Lib/test/test_capi.py ++++ b/Lib/test/test_capi.py +@@ -425,32 +425,21 @@ class EmbeddingTests(unittest.TestCase): + def test_repeated_init_and_subinterpreters(self): + # This is just a "don't crash" test + out, err = self.run_embedded_interpreter('repeated_init_and_subinterpreters') +- if support.verbose: ++ if support.verbose > 1: + print() + print(out) + print(err) + +- @staticmethod +- def _get_default_pipe_encoding(): +- rp, wp = os.pipe() +- try: +- with os.fdopen(wp, 'w') as w: +- default_pipe_encoding = w.encoding +- finally: +- os.close(rp) +- return default_pipe_encoding +- + def test_forced_io_encoding(self): + # Checks forced configuration of embedded interpreter IO streams + env = dict(os.environ, PYTHONIOENCODING="utf-8:surrogateescape") + out, err = self.run_embedded_interpreter("forced_io_encoding", env=env) +- if support.verbose: ++ if support.verbose > 1: + print() + print(out) + print(err) + expected_stream_encoding = "utf-8" + expected_errors = "surrogateescape" +- expected_pipe_encoding = self._get_default_pipe_encoding() + expected_output = '\n'.join([ + "--- Use defaults ---", + "Expected encoding: default", +diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py +index ae2bcd4..0a302ff 100644 +--- a/Lib/test/test_cmd_line.py ++++ b/Lib/test/test_cmd_line.py +@@ -151,6 +152,7 @@ class CmdLineTest(unittest.TestCase): + env = os.environ.copy() + # Use C locale to get ascii for the locale encoding + env['LC_ALL'] = 'C' ++ env['PYTHONCOERCECLOCALE'] = '0' + code = ( + b'import locale; ' + b'print(ascii("' + undecodable + b'"), ' +diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py +index 7866a5c..b41239a 100644 +--- a/Lib/test/test_sys.py ++++ b/Lib/test/test_sys.py +@@ -680,6 +680,7 @@ class SysModuleTest(unittest.TestCase): + # Force the POSIX locale + env = os.environ.copy() + env["LC_ALL"] = "C" ++ env["PYTHONCOERCECLOCALE"] = "0" + code = '\n'.join(( + 'import sys', + 'def dump(name):', +diff --git a/Modules/main.c b/Modules/main.c +index b0fb78f..0d8590a 100644 +--- a/Modules/main.c ++++ b/Modules/main.c +@@ -105,7 +105,11 @@ static const char usage_6[] = + " predictable seed.\n" + "PYTHONMALLOC: set the Python memory allocators and/or install debug hooks\n" + " on Python memory 
allocators. Use PYTHONMALLOC=debug to install debug\n" +-" hooks.\n"; ++" hooks.\n" ++ ++"PYTHONCOERCECLOCALE: if this variable is set to 0, it disables the locale\n" ++" coercion behavior. Use PYTHONCOERCECLOCALE=warn to request display of\n" ++" locale coercion and locale compatibility warnings on stderr.\n"; + + static int + usage(int exitcode, const wchar_t* program) +diff --git a/Programs/_testembed.c b/Programs/_testembed.c +index b0f9087..da892bf 100644 +--- a/Programs/_testembed.c ++++ b/Programs/_testembed.c +@@ -1,4 +1,5 @@ + #include ++#include "pyconfig.h" + #include "pythread.h" + #include + +diff --git a/Programs/python.c b/Programs/python.c +index a7afbc7..03f8295 100644 +--- a/Programs/python.c ++++ b/Programs/python.c +@@ -15,6 +15,21 @@ wmain(int argc, wchar_t **argv) + } + #else + ++/* Access private pylifecycle helper API to better handle the legacy C locale ++ * ++ * The legacy C locale assumes ASCII as the default text encoding, which ++ * causes problems not only for the CPython runtime, but also other ++ * components like GNU readline. ++ * ++ * Accordingly, when the CLI detects it, it attempts to coerce it to a ++ * more capable UTF-8 based alternative. ++ * ++ * See the documentation of the PYTHONCOERCECLOCALE setting for more details. ++ * ++ */ ++extern int _Py_LegacyLocaleDetected(void); ++extern void _Py_CoerceLegacyLocale(void); ++ + int + main(int argc, char **argv) + { +@@ -25,7 +40,11 @@ main(int argc, char **argv) + char *oldloc; + + /* Force malloc() allocator to bootstrap Python */ ++#ifdef Py_DEBUG ++ (void)_PyMem_SetupAllocators("malloc_debug"); ++# else + (void)_PyMem_SetupAllocators("malloc"); ++# endif + + argv_copy = (wchar_t **)PyMem_RawMalloc(sizeof(wchar_t*) * (argc+1)); + argv_copy2 = (wchar_t **)PyMem_RawMalloc(sizeof(wchar_t*) * (argc+1)); +@@ -49,7 +68,21 @@ main(int argc, char **argv) + return 1; + } + ++#ifdef __ANDROID__ ++ /* Passing "" to setlocale() on Android requests the C locale rather ++ * than checking environment variables, so request C.UTF-8 explicitly ++ */ ++ setlocale(LC_ALL, "C.UTF-8"); ++#else ++ /* Reconfigure the locale to the default for this process */ + setlocale(LC_ALL, ""); ++#endif ++ ++ if (_Py_LegacyLocaleDetected()) { ++ _Py_CoerceLegacyLocale(); ++ } ++ ++ /* Convert from char to wchar_t based on the locale settings */ + for (i = 0; i < argc; i++) { + argv_copy[i] = Py_DecodeLocale(argv[i], NULL); + if (!argv_copy[i]) { +@@ -70,7 +103,11 @@ main(int argc, char **argv) + + /* Force again malloc() allocator to release memory blocks allocated + before Py_Main() */ ++#ifdef Py_DEBUG ++ (void)_PyMem_SetupAllocators("malloc_debug"); ++# else + (void)_PyMem_SetupAllocators("malloc"); ++# endif + + for (i = 0; i < argc; i++) { + PyMem_RawFree(argv_copy2[i]); +diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c +index 640271f..2a22b24 100644 +--- a/Python/pylifecycle.c ++++ b/Python/pylifecycle.c +@@ -167,6 +167,7 @@ Py_SetStandardStreamEncoding(const char *encoding, const char *errors) + return 0; + } + ++ + /* Global initializations. Can be undone by Py_FinalizeEx(). Don't + call this twice without an intervening Py_FinalizeEx() call. 
When + initializations fail, a fatal error is issued and the function does +@@ -301,6 +302,183 @@ import_init(PyInterpreterState *interp, PyObject *sysmod) + } + + ++/* Helper functions to better handle the legacy C locale ++ * ++ * The legacy C locale assumes ASCII as the default text encoding, which ++ * causes problems not only for the CPython runtime, but also other ++ * components like GNU readline. ++ * ++ * Accordingly, when the CLI detects it, it attempts to coerce it to a ++ * more capable UTF-8 based alternative as follows: ++ * ++ * if (_Py_LegacyLocaleDetected()) { ++ * _Py_CoerceLegacyLocale(); ++ * } ++ * ++ * See the documentation of the PYTHONCOERCECLOCALE setting for more details. ++ * ++ * Locale coercion also impacts the default error handler for the standard ++ * streams: while the usual default is "strict", the default for the legacy ++ * C locale and for any of the coercion target locales is "surrogateescape". ++ */ ++ ++int ++_Py_LegacyLocaleDetected(void) ++{ ++#ifndef MS_WINDOWS ++ /* On non-Windows systems, the C locale is considered a legacy locale */ ++ /* XXX (ncoghlan): some platforms (notably Mac OS X) don't appear to treat ++ * the POSIX locale as a simple alias for the C locale, so ++ * we may also want to check for that explicitly. ++ */ ++ const char *ctype_loc = setlocale(LC_CTYPE, NULL); ++ return ctype_loc != NULL && strcmp(ctype_loc, "C") == 0; ++#else ++ /* Windows uses code pages instead of locales, so no locale is legacy */ ++ return 0; ++#endif ++} ++ ++ ++static const char *_C_LOCALE_WARNING = ++ "Python runtime initialized with LC_CTYPE=C (a locale with default ASCII " ++ "encoding), which may cause Unicode compatibility problems. Using C.UTF-8, " ++ "C.utf8, or UTF-8 (if available) as alternative Unicode-compatible " ++ "locales is recommended.\n"; ++ ++static int ++_legacy_locale_warnings_enabled(void) ++{ ++ const char *coerce_c_locale = getenv("PYTHONCOERCECLOCALE"); ++ return (coerce_c_locale != NULL && ++ strncmp(coerce_c_locale, "warn", 5) == 0); ++} ++ ++static void ++_emit_stderr_warning_for_legacy_locale(void) ++{ ++ if (_legacy_locale_warnings_enabled()) { ++ if (_Py_LegacyLocaleDetected()) { ++ fprintf(stderr, "%s", _C_LOCALE_WARNING); ++ } ++ } ++} ++ ++typedef struct _CandidateLocale { ++ const char *locale_name; /* The locale to try as a coercion target */ ++} _LocaleCoercionTarget; ++ ++static _LocaleCoercionTarget _TARGET_LOCALES[] = { ++ {"C.UTF-8"}, ++ {"C.utf8"}, ++ {"UTF-8"}, ++ {NULL} ++}; ++ ++static char * ++get_default_standard_stream_error_handler(void) ++{ ++ const char *ctype_loc = setlocale(LC_CTYPE, NULL); ++ if (ctype_loc != NULL) { ++ /* "surrogateescape" is the default in the legacy C locale */ ++ if (strcmp(ctype_loc, "C") == 0) { ++ return "surrogateescape"; ++ } ++ ++#ifdef PY_COERCE_C_LOCALE ++ /* "surrogateescape" is the default in locale coercion target locales */ ++ const _LocaleCoercionTarget *target = NULL; ++ for (target = _TARGET_LOCALES; target->locale_name; target++) { ++ if (strcmp(ctype_loc, target->locale_name) == 0) { ++ return "surrogateescape"; ++ } ++ } ++#endif ++ } ++ ++ /* Otherwise return NULL to request the typical default error handler */ ++ return NULL; ++} ++ ++#ifdef PY_COERCE_C_LOCALE ++static const char *_C_LOCALE_COERCION_WARNING = ++ "Python detected LC_CTYPE=C: LC_CTYPE coerced to %.20s (set another locale " ++ "or PYTHONCOERCECLOCALE=0 to disable this locale coercion behavior).\n"; ++ ++static void ++_coerce_default_locale_settings(const _LocaleCoercionTarget *target) ++{ ++ ++ 
const char *newloc = target->locale_name; ++ ++ /* Reset locale back to currently configured defaults */ ++ setlocale(LC_ALL, ""); ++ ++ /* Set the relevant locale environment variable */ ++ if (setenv("LC_CTYPE", newloc, 1)) { ++ fprintf(stderr, ++ "Error setting LC_CTYPE, skipping C locale coercion\n"); ++ return; ++ } ++ if (_legacy_locale_warnings_enabled()) { ++ fprintf(stderr, _C_LOCALE_COERCION_WARNING, newloc); ++ } ++ ++ /* Reconfigure with the overridden environment variables */ ++ setlocale(LC_ALL, ""); ++} ++#endif ++ ++ ++void ++_Py_CoerceLegacyLocale(void) ++{ ++#ifdef PY_COERCE_C_LOCALE ++ /* We ignore the Python -E and -I flags here, as the CLI needs to sort out ++ * the locale settings *before* we try to do anything with the command ++ * line arguments. For cross-platform debugging purposes, we also need ++ * to give end users a way to force even scripts that are otherwise ++ * isolated from their environment to use the legacy ASCII-centric C ++ * locale. ++ * ++ * Ignoring -E and -I is safe from a security perspective, as we only use ++ * the setting to turn *off* the implicit locale coercion, and anyone with ++ * access to the process environment already has the ability to set ++ * `LC_ALL=C` to override the C level locale settings anyway. ++ */ ++ const char *coerce_c_locale = getenv("PYTHONCOERCECLOCALE"); ++ if (coerce_c_locale == NULL || strncmp(coerce_c_locale, "0", 2) != 0) { ++ /* PYTHONCOERCECLOCALE is not set, or is set to something other than "0" */ ++ const char *locale_override = getenv("LC_ALL"); ++ if (locale_override == NULL || *locale_override == '\0') { ++ /* LC_ALL is also not set (or is set to an empty string) */ ++ const _LocaleCoercionTarget *target = NULL; ++ for (target = _TARGET_LOCALES; target->locale_name; target++) { ++ const char *new_locale = setlocale(LC_CTYPE, ++ target->locale_name); ++ if (new_locale != NULL) { ++#if !defined(__APPLE__) && defined(HAVE_LANGINFO_H) && defined(CODESET) ++ /* Also ensure that nl_langinfo works in this locale */ ++ char *codeset = nl_langinfo(CODESET); ++ if (!codeset || *codeset == '\0') { ++ /* CODESET is not set or empty, so skip coercion */ ++ new_locale = NULL; ++ setlocale(LC_CTYPE, ""); ++ continue; ++ } ++#endif ++ /* Successfully configured locale, so make it the default */ ++ _coerce_default_locale_settings(target); ++ return; ++ } ++ } ++ } ++ } ++ /* No C locale warning here, as Py_Initialize will emit one later */ ++#endif ++} ++ ++ + void + _Py_InitializeEx_Private(int install_sigs, int install_importlib) + { +@@ -315,11 +493,19 @@ _Py_InitializeEx_Private(int install_sigs, int install_importlib) + initialized = 1; + _Py_Finalizing = NULL; + +-#ifdef HAVE_SETLOCALE ++#ifdef __ANDROID__ ++ /* Passing "" to setlocale() on Android requests the C locale rather ++ * than checking environment variables, so request C.UTF-8 explicitly ++ */ ++ setlocale(LC_CTYPE, "C.UTF-8"); ++#else ++#ifndef MS_WINDOWS + /* Set up the LC_CTYPE locale, so we can obtain + the locale's charset without having to switch + locales. */ + setlocale(LC_CTYPE, ""); ++ _emit_stderr_warning_for_legacy_locale(); ++#endif + #endif + + if ((p = Py_GETENV("PYTHONDEBUG")) && *p != '\0') +@@ -1251,12 +1437,8 @@ initstdio(void) + } + } + if (!errors && !(pythonioencoding && *pythonioencoding)) { +- /* When the LC_CTYPE locale is the POSIX locale ("C locale"), +- stdin and stdout use the surrogateescape error handler by +- default, instead of the strict error handler. 
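Aside (not part of the patch): the run-time effect described in the surrounding comments can be observed from Python itself. The sketch below is illustrative only; what it prints depends on the environment the interpreter was started in, and the coercion target actually chosen (C.UTF-8, C.utf8 or UTF-8) varies by platform.

    import locale
    import sys

    # After startup the CLI may have coerced LC_CTYPE to one of the target
    # locales listed above; in the legacy C locale and in the coercion target
    # locales the standard streams default to the "surrogateescape" handler.
    print(locale.setlocale(locale.LC_CTYPE, None))   # e.g. "C.UTF-8" after coercion
    print(sys.stdout.errors)                         # e.g. "surrogateescape"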
*/ +- char *loc = setlocale(LC_CTYPE, NULL); +- if (loc != NULL && strcmp(loc, "C") == 0) +- errors = "surrogateescape"; ++ /* Choose the default error handler based on the current locale */ ++ errors = get_default_standard_stream_error_handler(); + } + } + +diff --git a/configure b/configure +index 2915246..39e5a27 100755 +--- a/configure ++++ b/configure +@@ -834,6 +834,8 @@ with_thread + enable_ipv6 + with_doc_strings + with_pymalloc ++with_c_locale_coercion ++with_c_locale_warning + with_valgrind + with_dtrace + with_fpectl +@@ -1527,6 +1529,12 @@ Optional Packages: + deprecated; use --with(out)-threads + --with(out)-doc-strings disable/enable documentation strings + --with(out)-pymalloc disable/enable specialized mallocs ++ --with(out)-c-locale-coercion ++ disable/enable C locale coercion to a UTF-8 based ++ locale ++ --with(out)-c-locale-warning ++ disable/enable locale compatibility warning in the C ++ locale + --with-valgrind Enable Valgrind support + --with(out)-dtrace disable/enable DTrace support + --with-fpectl enable SIGFPE catching +@@ -11010,6 +11018,52 @@ fi + { $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_pymalloc" >&5 + $as_echo "$with_pymalloc" >&6; } + ++# Check for --with-c-locale-coercion ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-c-locale-coercion" >&5 ++$as_echo_n "checking for --with-c-locale-coercion... " >&6; } ++ ++# Check whether --with-c-locale-coercion was given. ++if test "${with_c_locale_coercion+set}" = set; then : ++ withval=$with_c_locale_coercion; ++fi ++ ++ ++if test -z "$with_c_locale_coercion" ++then ++ with_c_locale_coercion="yes" ++fi ++if test "$with_c_locale_coercion" != "no" ++then ++ ++$as_echo "#define PY_COERCE_C_LOCALE 1" >>confdefs.h ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_c_locale_coercion" >&5 ++$as_echo "$with_c_locale_coercion" >&6; } ++ ++# Check for --with-c-locale-warning ++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-c-locale-warning" >&5 ++$as_echo_n "checking for --with-c-locale-warning... " >&6; } ++ ++# Check whether --with-c-locale-warning was given. ++if test "${with_c_locale_warning+set}" = set; then : ++ withval=$with_c_locale_warning; ++fi ++ ++ ++if test -z "$with_c_locale_warning" ++then ++ with_c_locale_warning="yes" ++fi ++if test "$with_c_locale_warning" != "no" ++then ++ ++$as_echo "#define PY_WARN_ON_C_LOCALE 1" >>confdefs.h ++ ++fi ++{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_c_locale_warning" >&5 ++$as_echo "$with_c_locale_warning" >&6; } ++ + # Check for Valgrind support + { $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-valgrind" >&5 + $as_echo_n "checking for --with-valgrind... 
" >&6; } +diff --git a/configure.ac b/configure.ac +index 67dfba3..b9c9f04 100644 +--- a/configure.ac ++++ b/configure.ac +@@ -3279,6 +3279,40 @@ then + fi + AC_MSG_RESULT($with_pymalloc) + ++# Check for --with-c-locale-coercion ++AC_MSG_CHECKING(for --with-c-locale-coercion) ++AC_ARG_WITH(c-locale-coercion, ++ AS_HELP_STRING([--with(out)-c-locale-coercion], ++ [disable/enable C locale coercion to a UTF-8 based locale])) ++ ++if test -z "$with_c_locale_coercion" ++then ++ with_c_locale_coercion="yes" ++fi ++if test "$with_c_locale_coercion" != "no" ++then ++ AC_DEFINE(PY_COERCE_C_LOCALE, 1, ++ [Define if you want to coerce the C locale to a UTF-8 based locale]) ++fi ++AC_MSG_RESULT($with_c_locale_coercion) ++ ++# Check for --with-c-locale-warning ++AC_MSG_CHECKING(for --with-c-locale-warning) ++AC_ARG_WITH(c-locale-warning, ++ AS_HELP_STRING([--with(out)-c-locale-warning], ++ [disable/enable locale compatibility warning in the C locale])) ++ ++if test -z "$with_c_locale_warning" ++then ++ with_c_locale_warning="yes" ++fi ++if test "$with_c_locale_warning" != "no" ++then ++ AC_DEFINE(PY_WARN_ON_C_LOCALE, 1, ++ [Define to emit a locale compatibility warning in the C locale]) ++fi ++AC_MSG_RESULT($with_c_locale_warning) ++ + # Check for Valgrind support + AC_MSG_CHECKING([for --with-valgrind]) + AC_ARG_WITH([valgrind], +diff --git a/pyconfig.h.in b/pyconfig.h.in +index b10c57f..0a6f3e2 100644 +--- a/pyconfig.h.in ++++ b/pyconfig.h.in +@@ -1244,9 +1244,15 @@ + /* Define as the preferred size in bits of long digits */ + #undef PYLONG_BITS_IN_DIGIT + ++/* Define if you want to coerce the C locale to a UTF-8 based locale */ ++#undef PY_COERCE_C_LOCALE ++ + /* Define to printf format modifier for Py_ssize_t */ + #undef PY_FORMAT_SIZE_T + ++/* Define to emit a locale compatibility warning in the C locale */ ++#undef PY_WARN_ON_C_LOCALE ++ + /* Define if you want to build an interpreter with many run-time checks. */ + #undef Py_DEBUG + diff --git a/SOURCES/00292-restore-PyExc_RecursionErrorInst-symbol.patch b/SOURCES/00292-restore-PyExc_RecursionErrorInst-symbol.patch new file mode 100644 index 00000000..89671f89 --- /dev/null +++ b/SOURCES/00292-restore-PyExc_RecursionErrorInst-symbol.patch @@ -0,0 +1,106 @@ +diff --git a/Doc/whatsnew/3.6.rst b/Doc/whatsnew/3.6.rst +index 847b50140a6..570dc3ed6fe 100644 +--- a/Doc/whatsnew/3.6.rst ++++ b/Doc/whatsnew/3.6.rst +@@ -1852,10 +1852,10 @@ Build and C API Changes + * The :c:func:`PyUnicode_FSConverter` and :c:func:`PyUnicode_FSDecoder` + functions will now accept :term:`path-like objects `. + +-* The ``PyExc_RecursionErrorInst`` singleton that was part of the public API +- has been removed as its members being never cleared may cause a segfault +- during finalization of the interpreter. Contributed by Xavier de Gaye in +- :issue:`22898` and :issue:`30697`. ++* The ``PyExc_RecursionErrorInst`` singleton is not used anymore as its members ++ being never cleared may cause a segfault during finalization of the ++ interpreter. Contributed by Xavier de Gaye in :issue:`22898` and ++ :issue:`30697`. 
+ + + Other Improvements +diff --git a/Include/pyerrors.h b/Include/pyerrors.h +index c28c1373f82..8c1dbc5047b 100644 +--- a/Include/pyerrors.h ++++ b/Include/pyerrors.h +@@ -219,6 +219,8 @@ PyAPI_DATA(PyObject *) PyExc_IOError; + PyAPI_DATA(PyObject *) PyExc_WindowsError; + #endif + ++PyAPI_DATA(PyObject *) PyExc_RecursionErrorInst; ++ + /* Predefined warning categories */ + PyAPI_DATA(PyObject *) PyExc_Warning; + PyAPI_DATA(PyObject *) PyExc_UserWarning; +diff --git a/Misc/NEWS.d/next/C API/2017-12-20-15-23-06.bpo-30697.v9FmgG.rst b/Misc/NEWS.d/next/C API/2017-12-20-15-23-06.bpo-30697.v9FmgG.rst +new file mode 100644 +index 00000000000..28f74ad4f30 +--- /dev/null ++++ b/Misc/NEWS.d/next/C API/2017-12-20-15-23-06.bpo-30697.v9FmgG.rst +@@ -0,0 +1 @@ ++Restore PyExc_RecursionErrorInst in 3.6 +diff --git a/Objects/exceptions.c b/Objects/exceptions.c +index df4899372a5..271e293e325 100644 +--- a/Objects/exceptions.c ++++ b/Objects/exceptions.c +@@ -2430,6 +2430,12 @@ SimpleExtendsException(PyExc_Warning, ResourceWarning, + + + ++/* Pre-computed RecursionError instance for when recursion depth is reached. ++ Meant to be used when normalizing the exception for exceeding the recursion ++ depth will cause its own infinite recursion. ++*/ ++PyObject *PyExc_RecursionErrorInst = NULL; ++ + #define PRE_INIT(TYPE) \ + if (!(_PyExc_ ## TYPE.tp_flags & Py_TPFLAGS_READY)) { \ + if (PyType_Ready(&_PyExc_ ## TYPE) < 0) \ +@@ -2691,11 +2697,37 @@ _PyExc_Init(PyObject *bltinmod) + ADD_ERRNO(TimeoutError, ETIMEDOUT); + + preallocate_memerrors(); ++ ++ if (!PyExc_RecursionErrorInst) { ++ PyExc_RecursionErrorInst = BaseException_new(&_PyExc_RecursionError, NULL, NULL); ++ if (!PyExc_RecursionErrorInst) ++ Py_FatalError("Cannot pre-allocate RecursionError instance for " ++ "recursion errors"); ++ else { ++ PyBaseExceptionObject *err_inst = ++ (PyBaseExceptionObject *)PyExc_RecursionErrorInst; ++ PyObject *args_tuple; ++ PyObject *exc_message; ++ exc_message = PyUnicode_FromString("maximum recursion depth exceeded"); ++ if (!exc_message) ++ Py_FatalError("cannot allocate argument for RecursionError " ++ "pre-allocation"); ++ args_tuple = PyTuple_Pack(1, exc_message); ++ if (!args_tuple) ++ Py_FatalError("cannot allocate tuple for RecursionError " ++ "pre-allocation"); ++ Py_DECREF(exc_message); ++ if (BaseException_init(err_inst, args_tuple, NULL)) ++ Py_FatalError("init of pre-allocated RecursionError failed"); ++ Py_DECREF(args_tuple); ++ } ++ } + } + + void + _PyExc_Fini(void) + { ++ Py_CLEAR(PyExc_RecursionErrorInst); + free_preallocated_memerrors(); + Py_CLEAR(errnomap); + } +diff --git a/PC/python3.def b/PC/python3.def +index 4fc4a6814ee..ff70718fc37 100644 +--- a/PC/python3.def ++++ b/PC/python3.def +@@ -224,6 +224,7 @@ EXPORTS + PyExc_PermissionError=python36.PyExc_PermissionError DATA + PyExc_ProcessLookupError=python36.PyExc_ProcessLookupError DATA + PyExc_RecursionError=python36.PyExc_RecursionError DATA ++ PyExc_RecursionErrorInst=python36.PyExc_RecursionErrorInst DATA + PyExc_ReferenceError=python36.PyExc_ReferenceError DATA + PyExc_ResourceWarning=python36.PyExc_ResourceWarning DATA + PyExc_RuntimeError=python36.PyExc_RuntimeError DATA diff --git a/SOURCES/00301-pathfix-add-n-option-for-no-backup.patch b/SOURCES/00301-pathfix-add-n-option-for-no-backup.patch new file mode 100644 index 00000000..350fe97b --- /dev/null +++ b/SOURCES/00301-pathfix-add-n-option-for-no-backup.patch @@ -0,0 +1,104 @@ +From 5affd5c29eb1493cb31ef3cfdde15538ac134689 Mon Sep 17 00:00:00 2001 +From: 
=?UTF-8?q?Miro=20Hron=C4=8Dok?= +Date: Tue, 13 Mar 2018 10:56:43 +0100 +Subject: [PATCH] bpo-32885: Tools/scripts/pathfix.py: Add -n option for no + backup~ (#5772) + +Creating backup files with ~ suffix can be undesirable in some environment, +such as when building RPM packages. Instead of requiring the user to remove +those files manually, option -n was added, that simply disables this feature. + +-n was selected because 2to3 has the same option with this behavior. +--- + Misc/ACKS | 1 + + .../2018-02-20-12-16-47.bpo-32885.dL5x7C.rst | 2 ++ + Tools/scripts/pathfix.py | 28 +++++++++++++++------- + 3 files changed, 23 insertions(+), 8 deletions(-) + create mode 100644 Misc/NEWS.d/next/Tools-Demos/2018-02-20-12-16-47.bpo-32885.dL5x7C.rst + +diff --git a/Misc/ACKS b/Misc/ACKS +index d8179c8b03ab..d752d8a35434 100644 +--- a/Misc/ACKS ++++ b/Misc/ACKS +@@ -687,6 +687,7 @@ Ken Howard + Brad Howes + Mike Hoy + Ben Hoyt ++Miro Hrončok + Chiu-Hsiang Hsu + Chih-Hao Huang + Christian Hudon +diff --git a/Misc/NEWS.d/next/Tools-Demos/2018-02-20-12-16-47.bpo-32885.dL5x7C.rst b/Misc/NEWS.d/next/Tools-Demos/2018-02-20-12-16-47.bpo-32885.dL5x7C.rst +new file mode 100644 +index 000000000000..e003e1d84fd0 +--- /dev/null ++++ b/Misc/NEWS.d/next/Tools-Demos/2018-02-20-12-16-47.bpo-32885.dL5x7C.rst +@@ -0,0 +1,2 @@ ++Add an ``-n`` flag for ``Tools/scripts/pathfix.py`` to disbale automatic ++backup creation (files with ``~`` suffix). +diff --git a/Tools/scripts/pathfix.py b/Tools/scripts/pathfix.py +index 562bbc737812..c5bf984306a3 100755 +--- a/Tools/scripts/pathfix.py ++++ b/Tools/scripts/pathfix.py +@@ -7,8 +7,9 @@ + # Directories are searched recursively for files whose name looks + # like a python module. + # Symbolic links are always ignored (except as explicit directory +-# arguments). Of course, the original file is kept as a back-up +-# (with a "~" attached to its name). ++# arguments). ++# The original file is kept as a back-up (with a "~" attached to its name), ++# -n flag can be used to disable this. 
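Usage note: elsewhere in this document the spec file runs the patched script with -p and -n combined. A rough Python equivalent of such an invocation is sketched below; the interpreter path and target directory are placeholders, only the option letters come from the patch above.

    import subprocess

    subprocess.run(
        ["python3", "Tools/scripts/pathfix.py",
         "-i", "/usr/bin/python3.6",   # interpreter written into the shebang line
         "-p",                         # preserve timestamps
         "-n",                         # new option: do not keep filename~ backups
         "some/buildroot"],            # placeholder file-or-directory argument
        check=True,
    )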
+ # + # Undoubtedly you can do this using find and sed or perl, but this is + # a nice example of Python code that recurses down a directory tree +@@ -31,14 +32,17 @@ + + new_interpreter = None + preserve_timestamps = False ++create_backup = True ++ + + def main(): + global new_interpreter + global preserve_timestamps +- usage = ('usage: %s -i /interpreter -p file-or-directory ...\n' % ++ global create_backup ++ usage = ('usage: %s -i /interpreter -p -n file-or-directory ...\n' % + sys.argv[0]) + try: +- opts, args = getopt.getopt(sys.argv[1:], 'i:p') ++ opts, args = getopt.getopt(sys.argv[1:], 'i:pn') + except getopt.error as msg: + err(str(msg) + '\n') + err(usage) +@@ -48,6 +52,8 @@ def main(): + new_interpreter = a.encode() + if o == '-p': + preserve_timestamps = True ++ if o == '-n': ++ create_backup = False + if not new_interpreter or not new_interpreter.startswith(b'/') or \ + not args: + err('-i option or file-or-directory missing\n') +@@ -134,10 +140,16 @@ def fix(filename): + except OSError as msg: + err('%s: warning: chmod failed (%r)\n' % (tempname, msg)) + # Then make a backup of the original file as filename~ +- try: +- os.rename(filename, filename + '~') +- except OSError as msg: +- err('%s: warning: backup failed (%r)\n' % (filename, msg)) ++ if create_backup: ++ try: ++ os.rename(filename, filename + '~') ++ except OSError as msg: ++ err('%s: warning: backup failed (%r)\n' % (filename, msg)) ++ else: ++ try: ++ os.remove(filename) ++ except OSError as msg: ++ err('%s: warning: removing failed (%r)\n' % (filename, msg)) + # Now move the temp file to the original file + try: + os.rename(tempname, filename) diff --git a/SOURCES/Python-3.1.1-rpath.patch b/SOURCES/Python-3.1.1-rpath.patch new file mode 100644 index 00000000..9fae54c4 --- /dev/null +++ b/SOURCES/Python-3.1.1-rpath.patch @@ -0,0 +1,19 @@ +diff -up Python-3.1.1/Lib/distutils/unixccompiler.py.rpath Python-3.1.1/Lib/distutils/unixccompiler.py +--- Python-3.1.1/Lib/distutils/unixccompiler.py.rpath 2009-09-04 17:29:34.000000000 -0400 ++++ Python-3.1.1/Lib/distutils/unixccompiler.py 2009-09-04 17:49:54.000000000 -0400 +@@ -141,6 +141,15 @@ class UnixCCompiler(CCompiler): + if sys.platform == "cygwin": + exe_extension = ".exe" + ++ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs): ++ """Remove standard library path from rpath""" ++ libraries, library_dirs, runtime_library_dirs = super()._fix_lib_args( ++ libraries, library_dirs, runtime_library_dirs) ++ libdir = sysconfig.get_config_var('LIBDIR') ++ if runtime_library_dirs and (libdir in runtime_library_dirs): ++ runtime_library_dirs.remove(libdir) ++ return libraries, library_dirs, runtime_library_dirs ++ + def preprocess(self, source, output_file=None, macros=None, + include_dirs=None, extra_preargs=None, extra_postargs=None): + fixed_args = self._fix_compile_args(None, macros, include_dirs) diff --git a/SOURCES/check-pyc-and-pyo-timestamps.py b/SOURCES/check-pyc-and-pyo-timestamps.py new file mode 100644 index 00000000..5fd11cc9 --- /dev/null +++ b/SOURCES/check-pyc-and-pyo-timestamps.py @@ -0,0 +1,50 @@ +"""Checks if all *.pyc and *.pyo files have later mtime than their *.py files.""" + +import imp +import os +import sys + +# list of test and other files that we expect not to have bytecode +not_compiled = [ + 'test/bad_coding.py', + 'test/bad_coding2.py', + 'test/badsyntax_3131.py', + 'test/badsyntax_future3.py', + 'test/badsyntax_future4.py', + 'test/badsyntax_future5.py', + 'test/badsyntax_future6.py', + 'test/badsyntax_future7.py', + 
'test/badsyntax_future8.py', + 'test/badsyntax_future9.py', + 'test/badsyntax_future10.py', + 'test/badsyntax_pep3120.py', + 'lib2to3/tests/data/bom.py', + 'lib2to3/tests/data/crlf.py', + 'lib2to3/tests/data/different_encoding.py', + 'lib2to3/tests/data/false_encoding.py', + 'lib2to3/tests/data/py2_test_grammar.py', + '.debug-gdb.py', +] +failed = 0 + +def bytecode_expected(source): + for f in not_compiled: + if source.endswith(f): + return False + return True + +compiled = filter(lambda f: bytecode_expected(f), sys.argv[1:]) +for f in compiled: + # check both pyo and pyc + to_check = map(lambda b: imp.cache_from_source(f, b), (True, False)) + f_mtime = os.path.getmtime(f) + for c in to_check: + c_mtime = os.path.getmtime(c) + if c_mtime < f_mtime: + sys.stderr.write('Failed bytecompilation timestamps check: ') + sys.stderr.write('Bytecode file {} is older than source file {}.\n'.format(c, f)) + failed += 1 + +if failed: + sys.stderr.write('\n{} files failed bytecompilation timestamps check.\n'.format(failed)) + sys.exit(1) diff --git a/SOURCES/idle3.appdata.xml b/SOURCES/idle3.appdata.xml new file mode 100644 index 00000000..94f87a27 --- /dev/null +++ b/SOURCES/idle3.appdata.xml @@ -0,0 +1,35 @@ + + + + + idle3.desktop + IDLE3 + CC0 + Python-2.0 + Python 3 Integrated Development and Learning Environment + +

+  <description>
+    <p>
+      IDLE is Python’s Integrated Development and Learning Environment.
+      The GUI is uniform between Windows, Unix, and Mac OS X.
+      IDLE provides an easy way to start writing, running, and debugging
+      Python code.
+    </p>
+    <p>
+      IDLE is written in pure Python, and uses the tkinter GUI toolkit.
+      It provides:
+    </p>
+    <ul>
+      <li>a Python shell window (interactive interpreter) with colorizing of code input, output, and error messages,</li>
+      <li>a multi-window text editor with multiple undo, Python colorizing, smart indent, call tips, auto completion, and other features,</li>
+      <li>search within any window, replace within editor windows, and search through multiple files (grep),</li>
+      <li>a debugger with persistent breakpoints, stepping, and viewing of global and local namespaces.</li>
+    </ul>
+  </description>
+  <url type="homepage">https://docs.python.org/3/library/idle.html</url>
+  <screenshots>
+    <screenshot type="default">http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-main-window.png</screenshot>
+    <screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-class-browser.png</screenshot>
+    <screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-code-viewer.png</screenshot>
+  </screenshots>
+  <update_contact>zbyszek@in.waw.pl</update_contact>
+</application>
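As a quick sanity check, the metadata above can be read back with the standard library. This is only a sketch, separate from the appstream-util validation performed later in the spec file, and it assumes the file has been saved as idle3.appdata.xml in the current directory.

    import xml.etree.ElementTree as ET

    root = ET.parse("idle3.appdata.xml").getroot()
    print(root.findtext("summary"))        # the one-line summary
    for shot in root.iter("screenshot"):   # the three screenshot URLs
        print(shot.text.strip())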
diff --git a/SOURCES/idle3.desktop b/SOURCES/idle3.desktop new file mode 100644 index 00000000..dc1d3c34 --- /dev/null +++ b/SOURCES/idle3.desktop @@ -0,0 +1,11 @@ +[Desktop Entry] +Version=1.0 +Name=IDLE 3 +Comment=Python 3 Integrated Development and Learning Environment +Exec=idle3 %F +TryExec=idle3 +Terminal=false +Type=Application +Icon=idle3 +Categories=Development;IDE; +MimeType=text/x-python; \ No newline at end of file diff --git a/SOURCES/libpython.stp b/SOURCES/libpython.stp new file mode 100644 index 00000000..56cf2fb5 --- /dev/null +++ b/SOURCES/libpython.stp @@ -0,0 +1,17 @@ +/* Systemtap tapset to make it easier to trace Python */ + +/* + Define python.function.entry/return: +*/ +probe python.function.entry = process("python").library("LIBRARY_PATH").mark("function__entry") +{ + filename = user_string($arg1); + funcname = user_string($arg2); + lineno = $arg3; +} +probe python.function.return = process("python").library("LIBRARY_PATH").mark("function__return") +{ + filename = user_string($arg1); + funcname = user_string($arg2); + lineno = $arg3; +} diff --git a/SOURCES/macros.pybytecompile3.6 b/SOURCES/macros.pybytecompile3.6 new file mode 100644 index 00000000..f3199792 --- /dev/null +++ b/SOURCES/macros.pybytecompile3.6 @@ -0,0 +1,25 @@ +# Note that the path could itself be a python file, or a directory + +# Python's compile_all module only works on directories, and requires a max +# recursion depth + +# Note that the py_byte_compile macro should work for python2 as well +# Which unfortunately makes the definition more complicated than it should be +# The condition should be reversed once /usr/bin/python is python3! + +%py_byte_compile()\ +py2_byte_compile () {\ + python_binary="%1"\ + bytecode_compilation_path="%2"\ + find $bytecode_compilation_path -type f -a -name "*.py" -print0 | xargs -0 $python_binary -c 'import py_compile, sys; [py_compile.compile(f, dfile=f.partition("$RPM_BUILD_ROOT")[2]) for f in sys.argv[1:]]' || :\ + find $bytecode_compilation_path -type f -a -name "*.py" -print0 | xargs -0 $python_binary -O -c 'import py_compile, sys; [py_compile.compile(f, dfile=f.partition("$RPM_BUILD_ROOT")[2]) for f in sys.argv[1:]]' || :\ +}\ +\ +py3_byte_compile () {\ + python_binary="%1"\ + bytecode_compilation_path="%2"\ + find $bytecode_compilation_path -type f -a -name "*.py" -print0 | xargs -0 $python_binary -O -c 'import py_compile, sys; [py_compile.compile(f, dfile=f.partition("$RPM_BUILD_ROOT")[2], optimize=opt) for opt in range(2) for f in sys.argv[1:]]' || :\ +}\ +\ +[[ "%1" == *python3* ]] || py2_byte_compile "%1" "%2" && py3_byte_compile "%1" "%2" \ +%{nil} diff --git a/SOURCES/macros.systempython b/SOURCES/macros.systempython new file mode 100644 index 00000000..b7d1e913 --- /dev/null +++ b/SOURCES/macros.systempython @@ -0,0 +1,4 @@ +%system_python_abi %{expand: \ +%global __requires_exclude ^python\\\\(abi\\\\) = 3\\\\..$ +Requires: system-python(abi) = %{python3_version} +} diff --git a/SOURCES/pyfuntop.stp b/SOURCES/pyfuntop.stp new file mode 100644 index 00000000..f235a238 --- /dev/null +++ b/SOURCES/pyfuntop.stp @@ -0,0 +1,21 @@ +#!/usr/bin/stap + +global fn_calls; + +probe python.function.entry +{ + fn_calls[pid(), filename, funcname, lineno] += 1; +} + +probe timer.ms(1000) { + printf("\033[2J\033[1;1H") /* clear screen */ + printf("%6s %80s %6s %30s %6s\n", + "PID", "FILENAME", "LINE", "FUNCTION", "CALLS") + foreach ([pid, filename, funcname, lineno] in fn_calls- limit 20) { + printf("%6d %80s %6d %30s %6d\n", + pid, filename, lineno, funcname, + 
fn_calls[pid, filename, funcname, lineno]); + } + + delete fn_calls; +} diff --git a/SOURCES/python3-powerppc-arch.patch b/SOURCES/python3-powerppc-arch.patch new file mode 100644 index 00000000..10f02011 --- /dev/null +++ b/SOURCES/python3-powerppc-arch.patch @@ -0,0 +1,30 @@ +diff -up Python-3.5.0/configure.ac.than Python-3.5.0/configure.ac +--- Python-3.5.0/configure.ac.than 2015-11-13 11:51:32.039560172 -0500 ++++ Python-3.5.0/configure.ac 2015-11-13 11:52:11.670168157 -0500 +@@ -804,9 +804,9 @@ cat >> conftest.c <> conftest.c < %s in %s:%d\n", thread_indent(1), funcname, filename, lineno); +} + +probe python.function.return +{ + printf("%s <= %s in %s:%d\n", thread_indent(-1), funcname, filename, lineno); +} diff --git a/SPECS/python3.spec b/SPECS/python3.spec new file mode 100644 index 00000000..45006f9a --- /dev/null +++ b/SPECS/python3.spec @@ -0,0 +1,2641 @@ +# ====================================================== +# Conditionals and other variables controlling the build +# ====================================================== + +# NOTES ON BOOTSTRAPING PYTHON 3.6: +# +# Due to dependency cycle between Python, gdb, rpm, pip, setuptools and +# wheel, in order to rebase Python 3, one has to build in the following order: +# +# 1) gdb without python support (add %%global _without_python 1 on top of gdb's SPEC file) +# 2) python3 with with_rewheel set to 0 +# 3) gdb with python support (remove %%global _without_python 1 on top of gdb's SPEC file) +# 4) rpm +# 5) python-setuptools with bootstrap set to 1 +# 6) python-pip with build_wheel set to 0 +# 7) python-wheel with %%bcond_without bootstrap +# 8) python-setuptools with bootstrap set to 0 and also with_check set to 0 +# 9) python-pip with build_wheel set to 1 +# 10) pyparsing +# 11) python3 with with_rewheel set to 1 +# +# Then the most important packages have to be built, starting from their various leaf dependencies +# recursively. After these have been built, a targeted rebuild should be requested for the rest. +# Currently these packages are recommended to have been built before a targeted rebuild after a python abi change: +# python-sphinx, pytest, python-requests, cloud-init, dnf, anaconda, abrt. 
+ +%global with_rewheel 1 + +%global pybasever 3.6 + +# pybasever without the dot: +%global pyshortver 36 + +%global pylibdir %{_libdir}/python%{pybasever} +%global dynload_dir %{pylibdir}/lib-dynload + +# SOABI is defined in the upstream configure.in from Python-3.2a2 onwards, +# for PEP 3149: +# http://www.python.org/dev/peps/pep-3149/ + +# ("configure.in" became "configure.ac" in Python 3.3 onwards, and in +# backports) + +# ABIFLAGS, LDVERSION and SOABI are in the upstream Makefile +# With Python 3.3, we lose the "u" suffix due to PEP 393 +%global ABIFLAGS_optimized m +%global ABIFLAGS_debug dm + +%global LDVERSION_optimized %{pybasever}%{ABIFLAGS_optimized} +%global LDVERSION_debug %{pybasever}%{ABIFLAGS_debug} + +%global SOABI_optimized cpython-%{pyshortver}%{ABIFLAGS_optimized}-%{_arch}-linux%{_gnu} +%global SOABI_debug cpython-%{pyshortver}%{ABIFLAGS_debug}-%{_arch}-linux%{_gnu} + +# All bytecode files are now in a __pycache__ subdirectory, with a name +# reflecting the version of the bytecode (to permit sharing of python libraries +# between different runtimes) +# See http://www.python.org/dev/peps/pep-3147/ +# For example, +# foo/bar.py +# now has bytecode at: +# foo/__pycache__/bar.cpython-36.pyc +# foo/__pycache__/bar.cpython-36.opt-1.pyc +# foo/__pycache__/bar.cpython-36.opt-2.pyc +%global bytecode_suffixes .cpython-36*.pyc + +# Python's configure script defines SOVERSION, and this is used in the Makefile +# to determine INSTSONAME, the name of the libpython DSO: +# LDLIBRARY='libpython$(VERSION).so' +# INSTSONAME="$LDLIBRARY".$SOVERSION +# We mirror this here in order to make it easier to add the -gdb.py hooks. +# (if these get out of sync, the payload of the libs subpackage will fail +# and halt the build) +%global py_SOVERSION 1.0 +%global py_INSTSONAME_optimized libpython%{LDVERSION_optimized}.so.%{py_SOVERSION} +%global py_INSTSONAME_debug libpython%{LDVERSION_debug}.so.%{py_SOVERSION} + +%global with_debug_build 1 + +%global with_gdb_hooks 1 + +%global with_systemtap 0 + +# some arches don't have valgrind so we need to disable its support on them +%ifnarch s390 %{mips} riscv64 +%global with_valgrind 1 +%else +%global with_valgrind 0 +%endif + +%global with_gdbm 1 + +# Change from yes to no to turn this off +%global with_computed_gotos yes + +# Turn this to 0 to turn off the "check" phase: +%global run_selftest_suite 1 + +# We want to byte-compile the .py files within the packages using the new +# python3 binary. +# +# Unfortunately, rpmbuild's infrastructure requires us to jump through some +# hoops to avoid byte-compiling with the system python 2 version: +# /usr/lib/rpm/redhat/macros sets up build policy that (amongst other things) +# defines __os_install_post. In particular, "brp-python-bytecompile" is +# invoked without an argument thus using the wrong version of python +# (/usr/bin/python, rather than the freshly built python), thus leading to +# numerous syntax errors, and incorrect magic numbers in the .pyc files. 
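To make the magic-number concern concrete, the following self-contained sketch compares a bytecode file's header with the value the running interpreter expects; the .pyc path is a placeholder. A mismatch is exactly what happens when the system Python 2 byte-compiles files that were meant for the freshly built python3.

    import importlib.util

    pyc_path = "foo/__pycache__/bar.cpython-36.pyc"   # placeholder path
    with open(pyc_path, "rb") as pyc:
        magic = pyc.read(4)
    print("magic matches running interpreter:",
          magic == importlib.util.MAGIC_NUMBER)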
We +# thus override __os_install_post to avoid invoking this script: +%global __os_install_post /usr/lib/rpm/brp-compress \ + %{!?__debug_package:/usr/lib/rpm/brp-strip %{__strip}} \ + /usr/lib/rpm/brp-strip-static-archive %{__strip} \ + /usr/lib/rpm/brp-strip-comment-note %{__strip} %{__objdump} \ + /usr/lib/rpm/brp-python-hardlink +# to remove the invocation of brp-python-bytecompile, whilst keeping the +# invocation of brp-python-hardlink (since this should still work for python3 +# pyc/pyo files) + + +# ================== +# Top-level metadata +# ================== +Summary: Version 3 of the Python programming language aka Python 3000 +Name: python3 +# WARNING When rebasing to a new Python version, +# remember to update the python3-docs package as well +Version: %{pybasever}.5 +Release: 1%{?dist} +License: Python +Group: Development/Languages + + +# ======================= +# Build-time requirements +# ======================= + +# (keep this list alphabetized) + +BuildRequires: autoconf +BuildRequires: bluez-libs-devel +BuildRequires: bzip2 +BuildRequires: bzip2-devel + +# expat 2.1.0 added the symbol XML_SetHashSalt without bumping SONAME. We use +# it (in pyexpat) in order to enable the fix in Python-3.2.3 for CVE-2012-0876: +BuildRequires: expat-devel >= 2.1.0 + +BuildRequires: findutils +BuildRequires: gcc-c++ +%if %{with_gdbm} +BuildRequires: gdbm-devel +%endif +BuildRequires: glibc-devel +BuildRequires: gmp-devel +BuildRequires: libffi-devel +BuildRequires: libGL-devel +BuildRequires: libX11-devel +BuildRequires: ncurses-devel +# workaround http://bugs.python.org/issue19804 (test_uuid requires ifconfig) +BuildRequires: net-tools +BuildRequires: openssl-devel +BuildRequires: pkgconfig +BuildRequires: readline-devel +BuildRequires: sqlite-devel +BuildRequires: desktop-file-utils +BuildRequires: libappstream-glib +BuildRequires: gdb + +BuildRequires: systemtap-sdt-devel +BuildRequires: systemtap-devel +# (this introduces a dependency on "python", in that systemtap-sdt-devel's +# /usr/bin/dtrace is a python 2 script) +%global tapsetdir /usr/share/systemtap/tapset + +BuildRequires: tar +BuildRequires: tcl-devel +BuildRequires: tix-devel +BuildRequires: tk-devel + +%if 0%{?with_valgrind} +BuildRequires: valgrind-devel +%endif + +BuildRequires: xz-devel +BuildRequires: zlib-devel + +%if 0%{?with_rewheel} +BuildRequires: python3-setuptools +BuildRequires: python3-pip +%endif + + +# ======================= +# Source code and patches +# ======================= + +Source: https://www.python.org/ftp/python/%{version}/Python-%{version}.tar.xz + +# Supply an RPM macro "py_byte_compile" for the python3-devel subpackage +# to enable specfiles to selectively byte-compile individual files and paths +# with different Python runtimes as necessary: +Source3: macros.pybytecompile%{pybasever} + +# Systemtap tapset to make it easier to use the systemtap static probes +# (actually a template; LIBRARY_PATH will get fixed up during install) +# Written by dmalcolm; not yet sent upstream +Source5: libpython.stp + +# Example systemtap script using the tapset +# Written by wcohen, mjw, dmalcolm; not yet sent upstream +Source6: systemtap-example.stp + +# Another example systemtap script that uses the tapset +# Written by dmalcolm; not yet sent upstream +Source7: pyfuntop.stp + +# A simple script to check timestamps of bytecode files +# Run in check section with Python that is currently being built +# Written by bkabrda +Source8: check-pyc-and-pyo-timestamps.py + +# A simple macro that enables packages to 
require system-python(abi) instead of python(abi) +Source9: macros.systempython + +# Desktop menu entry for idle3 +Source10: idle3.desktop + +# AppData file for idle3 +Source11: idle3.appdata.xml + +# Fixup distutils/unixccompiler.py to remove standard library path from rpath: +# Was Patch0 in ivazquez' python3000 specfile: +Patch1: Python-3.1.1-rpath.patch + +# 00055 # +# Systemtap support: add statically-defined probe points +# Patch sent upstream as http://bugs.python.org/issue14776 +# with some subsequent reworking to cope with LANG=C in an rpmbuild +# (where sys.getfilesystemencoding() == 'ascii') +Patch55: 00055-systemtap.patch + +Patch102: 00102-lib64.patch + +# 00104 # +# Only used when "%{_lib}" == "lib64" +# Another lib64 fix, for distutils/tests/test_install.py; not upstream: +Patch104: 00104-lib64-fix-for-test_install.patch + +# 00111 # +# Patch the Makefile.pre.in so that the generated Makefile doesn't try to build +# a libpythonMAJOR.MINOR.a (bug 550692): +# Downstream only: not appropriate for upstream +Patch111: 00111-no-static-lib.patch + +# 00132 # +# Add non-standard hooks to unittest for use in the "check" phase below, when +# running selftests within the build: +# @unittest._skipInRpmBuild(reason) +# for tests that hang or fail intermittently within the build environment, and: +# @unittest._expectedFailureInRpmBuild +# for tests that always fail within the build environment +# +# The hooks only take effect if WITHIN_PYTHON_RPM_BUILD is set in the +# environment, which we set manually in the appropriate portion of the "check" +# phase below (and which potentially other python-* rpms could set, to reuse +# these unittest hooks in their own "check" phases) +Patch132: 00132-add-rpmbuild-hooks-to-unittest.patch + +# 00133 # +# 00133-skip-test_dl.patch is not relevant for python3: the "dl" module no +# longer exists + +# 00137 # +# Some tests within distutils fail when run in an rpmbuild: +Patch137: 00137-skip-distutils-tests-that-fail-in-rpmbuild.patch + +# 00146 # +# Support OpenSSL FIPS mode (e.g. when OPENSSL_FORCE_FIPS_MODE=1 is set) +# - handle failures from OpenSSL (e.g. on attempts to use MD5 in a +# FIPS-enforcing environment) +# - add a new "usedforsecurity" keyword argument to the various digest +# algorithms in hashlib so that you can whitelist a callsite with +# "usedforsecurity=False" +# (sent upstream for python 3 as http://bugs.python.org/issue9216 ; see RHEL6 +# python patch 119) +# - enforce usage of the _hashlib implementation: don't fall back to the _md5 +# and _sha* modules (leading to clearer error messages if fips selftests +# fail) +# - don't build the _md5 and _sha* modules; rely on the _hashlib implementation +# of hashlib +# (rhbz#563986) +# Note: Up to Python 3.4.0.b1, upstream had their own implementation of what +# they assumed would become sha3. This patch was adapted to give it the +# usedforsecurity argument, even though it did nothing (OpenSSL didn't have +# sha3 implementation at that time).In 3.4.0.b2, sha3 implementation was reverted +# (see http://bugs.python.org/issue16113), but the alterations were left in the +# patch, since they may be useful again if upstream decides to rerevert sha3 +# implementation and OpenSSL still doesn't support it. For now, they're harmless. 
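Assuming the patched hashlib described above (an unpatched Python 3.6 does not accept this keyword), a caller that uses MD5 for something non-cryptographic could whitelist the call roughly as follows:

    import hashlib

    # "usedforsecurity" is the downstream keyword added by the FIPS patch;
    # it is not available in a stock 3.6 interpreter.
    digest = hashlib.md5(b"cache key material", usedforsecurity=False)
    print(digest.hexdigest())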
+Patch146: 00146-hashlib-fips.patch + +# 00155 # +# Avoid allocating thunks in ctypes unless absolutely necessary, to avoid +# generating SELinux denials on "import ctypes" and "import uuid" when +# embedding Python within httpd (rhbz#814391) +Patch155: 00155-avoid-ctypes-thunks.patch + +# 00157 # +# Update uid/gid handling throughout the standard library: uid_t and gid_t are +# unsigned 32-bit values, but existing code often passed them through C long +# values, which are signed 32-bit values on 32-bit architectures, leading to +# negative int objects for uid/gid values >= 2^31 on 32-bit architectures. +# +# Introduce _PyObject_FromUid/Gid to convert uid_t/gid_t values to python +# objects, using int objects where the value will fit (long objects otherwise), +# and _PyArg_ParseUid/Gid to convert int/long to uid_t/gid_t, with -1 allowed +# as a special case (since this is given special meaning by the chown syscall) +# +# Update standard library to use this throughout for uid/gid values, so that +# very large uid/gid values are round-trippable, and -1 remains usable. +# (rhbz#697470) +Patch157: 00157-uid-gid-overflows.patch + +# 00160 # +# Python 3.3 added os.SEEK_DATA and os.SEEK_HOLE, which may be present in the +# header files in the build chroot, but may not be supported in the running +# kernel, hence we disable this test in an rpm build. +# Adding these was upstream issue http://bugs.python.org/issue10142 +# Not yet sent upstream +Patch160: 00160-disable-test_fs_holes-in-rpm-build.patch + +# 00163 # +# Some tests within test_socket fail intermittently when run inside Koji; +# disable them using unittest._skipInRpmBuild +# Not yet sent upstream +Patch163: 00163-disable-parts-of-test_socket-in-rpm-build.patch + +# 00170 # +# In debug builds, try to print repr() when a C-level assert fails in the +# garbage collector (typically indicating a reference-counting error +# somewhere else e.g in an extension module) +# Backported to 2.7 from a patch I sent upstream for py3k +# http://bugs.python.org/issue9263 (rhbz#614680) +# hiding the proposed new macros/functions within gcmodule.c to avoid exposing +# them within the extension API. +# (rhbz#850013 +Patch170: 00170-gc-assertions.patch + +# 00178 # +# Don't duplicate various FLAGS in sysconfig values +# http://bugs.python.org/issue17679 +# Does not affect python2 AFAICS (different sysconfig values initialization) +Patch178: 00178-dont-duplicate-flags-in-sysconfig.patch + +# 00180 # +# Enable building on ppc64p7 +# Not appropriate for upstream, Fedora-specific naming +Patch180: 00180-python-add-support-for-ppc64p7.patch + +# 00186 # +# Fix for https://bugzilla.redhat.com/show_bug.cgi?id=1023607 +# Previously, this fixed a problem where some *.py files were not being +# bytecompiled properly during build. This was result of py_compile.compile +# raising exception when trying to convert test file with bad encoding, and +# thus not continuing bytecompilation for other files. +# This was fixed upstream, but the test hasn't been merged yet, so we keep it +Patch186: 00186-dont-raise-from-py_compile.patch + +# 00188 # +# Downstream only patch that should be removed when we compile all guaranteed +# hashlib algorithms properly. 
The problem is this: +# - during tests, test_hashlib is imported and executed before test_lib2to3 +# - if at least one hash function has failed, trying to import it triggers an +# exception that is being caught and exception is logged: +# http://hg.python.org/cpython/file/2de806c8b070/Lib/hashlib.py#l217 +# - logging the exception makes logging module run basicConfig +# - when lib2to3 tests are run again, lib2to3 runs basicConfig again, which +# doesn't do anything, because it was run previously +# (logging.root.handlers != []), which means that the default setup +# (most importantly logging level) is not overriden. That means that a test +# relying on this will fail (test_filename_changing_on_output_single_dir) +Patch188: 00188-fix-lib2to3-tests-when-hashlib-doesnt-compile-properly.patch + +# 00189 # +# Add the rewheel module, allowing to recreate wheels from already installed +# ones +# https://github.com/bkabrda/rewheel +Patch189: 00189-add-rewheel-module.patch + +# 00205 # +# LIBPL variable in makefile takes LIBPL from configure.ac +# but the LIBPL variable defined there doesn't respect libdir macro +Patch205: 00205-make-libpl-respect-lib64.patch + +# 00206 # +# Remove hf flag from arm triplet which is used +# by debian but fedora infra uses only eabi without hf +Patch206: 00206-remove-hf-from-arm-triplet.patch + +# 00243 # +# Fix the triplet used on 64-bit MIPS +# rhbz#1322526: https://bugzilla.redhat.com/show_bug.cgi?id=1322526 +# Upstream uses Debian-like style mips64-linux-gnuabi64 +# Fedora needs the default mips64-linux-gnu +Patch243: 00243-fix-mips64-triplet.patch + +# 00262 # +# Backport of PEP 538: Coercing the legacy C locale to a UTF-8 based locale +# https://www.python.org/dev/peps/pep-0538/ +# Fedora Change: https://fedoraproject.org/wiki/Changes/python3_c.utf-8_locale +# Original proposal: https://bugzilla.redhat.com/show_bug.cgi?id=1404918 +Patch262: 00262-pep538_coerce_legacy_c_locale.patch + +# 00292 # +# Restore the public PyExc_RecursionErrorInst symbol that was removed +# from the 3.6.4 release upstream. +# Reported upstream: https://bugs.python.org/issue30697 +Patch292: 00292-restore-PyExc_RecursionErrorInst-symbol.patch + +# 00301 # +# Tools/scripts/pathfix.py: Add -n option for no backup~ +# See: https://bugzilla.redhat.com/show_bug.cgi?id=1546990 +# Fixed upstream: https://bugs.python.org/issue32885 +Patch301: 00301-pathfix-add-n-option-for-no-backup.patch + +# (New patches go here ^^^) +# +# When adding new patches to "python" and "python3" in Fedora, EL, etc., +# please try to keep the patch numbers in-sync between all specfiles. +# +# More information, and a patch number catalog, is at: +# +# https://fedoraproject.org/wiki/SIGs/Python/PythonPatches + +# add correct arch for ppc64/ppc64le +# it should be ppc64le-linux-gnu/ppc64-linux-gnu instead powerpc64le-linux-gnu/powerpc64-linux-gnu +Patch5001: python3-powerppc-arch.patch + +BuildRoot: %{_tmppath}/%{name}-%{version}-root + +# ====================================================== +# Additional metadata, and subpackages +# ====================================================== + +URL: https://www.python.org/ + +# See notes in bug 532118: +Provides: python(abi) = %{pybasever} + +Requires: %{name}-libs%{?_isa} = %{version}-%{release} + +# In order to support multiple python interpreters, apart from the system python3, +# for development purposes, new packages were introduced which can be installed in parallel +# with the main python3 package (e.g. 
1369688), with the naming scheme 'python', +# however in order to keep the upgrade path clean we need to Obsolete and Provide +# these packages at the main python3 package. +Obsoletes: python%{pyshortver} +Provides: python%{pyshortver} = %{version}-%{release} + +%if 0%{with_rewheel} +Requires: python3-setuptools +Requires: python3-pip +%endif + +%description +Python is an interpreted, interactive, object-oriented programming +language often compared to Tcl, Perl, Scheme or Java. Python includes +modules, classes, exceptions, very high level dynamic data types +and dynamic typing. Python supports interfaces to many system calls and +libraries, as well as to various windowing systems (X11, Motif, Tk, +Mac and MFC). + +Programmers can write new built-in modules for Python in C or C++. +Python can be used as an extension language for applications that +need a programmable interface. + +Note that documentation for Python is provided in the python3-docs package. + +This package provides the "python3" executable; most of the actual +implementation is within the "python3-libs" and "system-python-libs" packages. + +%package libs +Summary: Python runtime libraries +Group: Development/Libraries +# For Modularity purpose we need not to include the dist-tag int he dependency +%if %(d="%{?dist}"; [ "${d#module-base-runtime-}x" != "${d}x" ] && echo 1 || echo 0) +Requires: system-python-libs%{?_isa} = %{version} +%else +Requires: system-python-libs%{?_isa} = %{version}-%{release} +%endif + +# expat 2.1.0 added the symbol XML_SetHashSalt without bumping SONAME. We use +# this symbol (in pyexpat), so we must explicitly state this dependency to +# prevent "import pyexpat" from failing with a linker error if someone hasn't +# yet upgraded expat: +Requires: expat >= 2.1.0 +Provides: python3-enum34 = 1.0.4-5%{?dist} +Obsoletes: python3-enum34 < 1.0.4-5%{?dist} + +%description libs +This package contains runtime libraries for use by Python: +- the libpython dynamic library, for use by applications that embed Python as +a scripting language, and by the main "python3" executable +- the Python standard library + +%package -n system-python +Summary: System Python executable +Group: Development/Libraries +Requires: system-python-libs%{?_isa} = %{version}-%{release} +Provides: system-python(abi) = %{pybasever} + +%description -n system-python +System Python provides a binary interpreter which uses system-python-libs, +a subset of standard Python library considered essential to run various tools, +requiring Python, that consider themselves "system tools". + +%package -n system-python-libs +Summary: System Python runtime libraries +Group: Development/Libraries + +%define __requires_exclude ^(/usr/bin/python3.*|python\\(abi\\) = 3\\..*)$ + +Requires: expat >= 2.1.0 +# Python 3 built with glibc >= 2.24.90-26 needs to require it (rhbz#1410644). +Requires: glibc%{?_isa} >= 2.24.90-26 + +%description -n system-python-libs +This package contains files used to embed System Python into applications. 
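For illustration, once RPM macro expansion collapses the doubled backslashes, the __requires_exclude pattern defined above behaves like an ordinary regular expression. The sketch below shows which automatically generated requirements it would filter out; the dependency strings are examples, not taken from an actual build.

    import re

    # Expanded form of the pattern defined for system-python-libs above.
    pattern = re.compile(r"^(/usr/bin/python3.*|python\(abi\) = 3\..*)$")
    for dep in ("/usr/bin/python3.6",
                "python(abi) = 3.6",
                "libexpat.so.1()(64bit)"):
        print(dep, "->", "filtered out" if pattern.match(dep) else "kept")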
+ +%package devel +Summary: Libraries and header files needed for Python development +Group: Development/Libraries +Requires: %{name} = %{version}-%{release} +Requires: %{name}-libs%{?_isa} = %{version}-%{release} +BuildRequires: python-rpm-macros +Requires: python-rpm-macros +Requires: python3-rpm-macros + +# https://bugzilla.redhat.com/show_bug.cgi?id=1217376 +# https://bugzilla.redhat.com/show_bug.cgi?id=1496757 +# https://bugzilla.redhat.com/show_bug.cgi?id=1218294 +# TODO change to a specific subpackage once available (#1218294) +Requires: redhat-rpm-config + +Conflicts: %{name} < %{version}-%{release} + +%description devel +The Python programming language's interpreter can be extended with +dynamically loaded extensions and can be embedded in other programs. +This package contains the header files and libraries needed to do +these types of tasks. + +Install python3-devel if you want to develop Python extensions. The +python3 package will also need to be installed. You'll probably also +want to install the python3-docs package, which contains Python +documentation. + +%package tools +Summary: A collection of tools included with Python +Group: Development/Tools +Requires: %{name} = %{version}-%{release} +Requires: %{name}-tkinter = %{version}-%{release} + +%description tools +This package contains several tools included with Python + +%package tkinter +Summary: A GUI toolkit for Python +Group: Development/Languages +Requires: %{name} = %{version}-%{release} + +%description tkinter +The Tkinter (Tk interface) program is a graphical user interface for +the Python scripting language. + +%package test +Summary: The test modules from the main python3 package +Group: Development/Languages +Requires: %{name} = %{version}-%{release} +Requires: %{name}-tools = %{version}-%{release} + +%description test +The test modules from the main %{name} package. +These are in a separate package to save space, as they are almost never used +in production. + +You might want to install the python3-test package if you're developing +python code that uses more than just unittest and/or test_support.py. + +%if 0%{?with_debug_build} +%package debug +Summary: Debug version of the Python runtime +Group: Applications/System + +# The debug build is an all-in-one package version of the regular build, and +# shares the same .py/.pyc files and directories as the regular build. Hence +# we depend on all of the subpackages of the regular build: +Requires: %{name}%{?_isa} = %{version}-%{release} +Requires: %{name}-libs%{?_isa} = %{version}-%{release} +Requires: %{name}-devel%{?_isa} = %{version}-%{release} +Requires: %{name}-test%{?_isa} = %{version}-%{release} +Requires: %{name}-tkinter%{?_isa} = %{version}-%{release} +Requires: %{name}-tools%{?_isa} = %{version}-%{release} + +%description debug +python3-debug provides a version of the Python runtime with numerous debugging +features enabled, aimed at advanced Python users, such as developers of Python +extension modules. + +This version uses more memory and will be slower than the regular Python build, +but is useful for tracking down reference-counting issues, and other bugs. + +The bytecodes are unchanged, so that .pyc files are compatible between the two +versions of Python, but the debugging features mean that C/C++ extension +modules are ABI-incompatible with those built for the standard runtime. + +It shares installation directories with the standard Python runtime, so that +.py and .pyc files can be shared. 
All compiled extension modules gain a "_d" +suffix ("foo_d.so" rather than "foo.so") so that each Python implementation +can load its own extensions. +%endif # with_debug_build + +# ====================================================== +# The prep phase of the build: +# ====================================================== + +%prep +%setup -q -n Python-%{version}%{?prerel} + +%if 0%{?with_systemtap} +# Provide an example of usage of the tapset: +cp -a %{SOURCE6} . +cp -a %{SOURCE7} . +%endif # with_systemtap + +# Ensure that we're using the system copy of various libraries, rather than +# copies shipped by upstream in the tarball: +# Remove embedded copy of expat: +rm -r Modules/expat || exit 1 + +# Remove embedded copy of zlib: +rm -r Modules/zlib || exit 1 + +## Disabling hashlib patch for now as it needs to be reimplemented +## for OpenSSL 1.1.0. +# Don't build upstream Python's implementation of these crypto algorithms; +# instead rely on _hashlib and OpenSSL. +# +# For example, in our builds hashlib.md5 is implemented within _hashlib via +# OpenSSL (and thus respects FIPS mode), and does not fall back to _md5 +# TODO: there seems to be no OpenSSL support in Python for sha3 so far +# when it is there, also remove _sha3/ dir +#for f in md5module.c sha1module.c sha256module.c sha512module.c; do +# rm Modules/$f +#done + +%if 0%{with_rewheel} +%global pip_version 9.0.3 +sed -r -i s/'_PIP_VERSION = "[0-9.]+"'/'_PIP_VERSION = "%{pip_version}"'/ Lib/ensurepip/__init__.py +%endif + +# +# Apply patches: +# +%patch1 -p1 + +%if 0%{?with_systemtap} +%patch55 -p1 -b .systemtap +%endif + +%if "%{_lib}" == "lib64" +%patch102 -p1 +%patch104 -p1 +%endif +%patch111 -p1 +%patch132 -p1 +%patch137 -p1 +#patch146 -p1 +%patch155 -p1 +%patch157 -p1 +%patch160 -p1 +%patch163 -p1 +%patch170 -p1 +%patch178 -p1 +%patch180 -p1 +%patch186 -p1 +%patch188 -p1 + +%if 0%{with_rewheel} +%patch189 -p1 +%endif + +%patch205 -p1 +%patch206 -p1 +%patch243 -p1 +%patch262 -p1 +%patch292 -p1 +%patch301 -p1 + +# Currently (2010-01-15), http://docs.python.org/library is for 2.6, and there +# are many differences between 2.6 and the Python 3 library. 
+# +# Fix up the URLs within pydoc to point at the documentation for this +# MAJOR.MINOR version: +# +sed --in-place \ + --expression="s|http://docs.python.org/library|http://docs.python.org/%{pybasever}/library|g" \ + Lib/pydoc.py || exit 1 + +%patch5001 -p1 + +# ====================================================== +# Configuring and building the code: +# ====================================================== + +%build +topdir=$(pwd) +export CFLAGS="$RPM_OPT_FLAGS -D_GNU_SOURCE -fPIC -fwrapv" +export CXXFLAGS="$RPM_OPT_FLAGS -D_GNU_SOURCE -fPIC -fwrapv" +export CPPFLAGS="`pkg-config --cflags-only-I libffi`" +export OPT="$RPM_OPT_FLAGS -D_GNU_SOURCE -fPIC -fwrapv" +export LINKCC="gcc" +export CFLAGS="$CFLAGS `pkg-config --cflags openssl`" +export LDFLAGS="$RPM_LD_FLAGS -g `pkg-config --libs-only-L openssl`" + + +# Define a function, for how to perform a "build" of python for a given +# configuration: +BuildPython() { + ConfName=$1 + BinaryName=$2 + SymlinkName=$3 + ExtraConfigArgs=$4 + PathFixWithThisBinary=$5 + MoreCFlags=$6 + + ConfDir=build/$ConfName + + echo STARTING: BUILD OF PYTHON FOR CONFIGURATION: $ConfName - %{_bindir}/$BinaryName + mkdir -p $ConfDir + + pushd $ConfDir + + # Use the freshly created "configure" script, but in the directory two above: + %global _configure $topdir/configure + +%configure \ + --enable-ipv6 \ + --enable-shared \ + --with-computed-gotos=%{with_computed_gotos} \ + --with-dbmliborder=gdbm:ndbm:bdb \ + --with-system-expat \ + --with-system-ffi \ + --enable-loadable-sqlite-extensions \ + --with-dtrace \ + --with-lto \ +%if 0%{?with_systemtap} + --with-systemtap \ +%endif +%if 0%{?with_valgrind} + --with-valgrind \ +%endif + $ExtraConfigArgs \ + %{nil} + + # Set EXTRA_CFLAGS to our CFLAGS (rather than overriding OPT, as we've done + # in the past). + # This should fix a problem with --with-valgrind where it adds + # -DDYNAMIC_ANNOTATIONS_ENABLED=1 + # to OPT which must be passed to all compilation units in the build, + # otherwise leading to linker errors, e.g. 
+ # missing symbol AnnotateRWLockDestroy + # + # Invoke the build: + make EXTRA_CFLAGS="$CFLAGS $MoreCFlags" %{?_smp_mflags} + + popd + echo FINISHED: BUILD OF PYTHON FOR CONFIGURATION: $ConfDir +} + +# Use "BuildPython" to support building with different configurations: + +%if 0%{?with_debug_build} +BuildPython debug \ + python-debug \ + python%{pybasever}-debug \ +%ifarch %{ix86} x86_64 ppc %{power64} + "--with-pydebug --without-ensurepip" \ +%else + "--with-pydebug --without-ensurepip" \ +%endif + false \ + -O0 +%endif # with_debug_build + +BuildPython optimized \ + python \ + python%{pybasever} \ +%ifarch %{ix86} x86_64 + "--without-ensurepip --enable-optimizations" \ +%else + "--without-ensurepip" \ +%endif + true + +# ====================================================== +# Installing the built code: +# ====================================================== + +%install +topdir=$(pwd) +rm -fr %{buildroot} +mkdir -p %{buildroot}%{_prefix} %{buildroot}%{_mandir} + +InstallPython() { + + ConfName=$1 + PyInstSoName=$2 + MoreCFlags=$3 + + ConfDir=build/$ConfName + + echo STARTING: INSTALL OF PYTHON FOR CONFIGURATION: $ConfName + mkdir -p $ConfDir + + pushd $ConfDir + +make install DESTDIR=%{buildroot} INSTALL="install -p" EXTRA_CFLAGS="$MoreCFlags" + + popd + + # We install a collection of hooks for gdb that make it easier to debug + # executables linked against libpython3* (such as /usr/bin/python3 itself) + # + # These hooks are implemented in Python itself (though they are for the version + # of python that gdb is linked with, in this case Python 2.7) + # + # gdb-archer looks for them in the same path as the ELF file, with a -gdb.py suffix. + # We put them in the debuginfo package by installing them to e.g.: + # /usr/lib/debug/usr/lib/libpython3.2.so.1.0.debug-gdb.py + # + # See https://fedoraproject.org/wiki/Features/EasierPythonDebugging for more + # information + # + # Copy up the gdb hooks into place; the python file will be autoloaded by gdb + # when visiting libpython.so, provided that the python file is installed to the + # same path as the library (or its .debug file) plus a "-gdb.py" suffix, e.g: + # /usr/lib/debug/usr/lib64/libpython3.2.so.1.0.debug-gdb.py + # (note that the debug path is /usr/lib/debug for both 32/64 bit) + # + # Initially I tried: + # /usr/lib/libpython3.1.so.1.0-gdb.py + # but doing so generated noise when ldconfig was rerun (rhbz:562980) + # +%if 0%{?with_gdb_hooks} + DirHoldingGdbPy=%{_prefix}/lib/debug/%{_libdir} + PathOfGdbPy=$DirHoldingGdbPy/$PyInstSoName.debug-gdb.py + + mkdir -p %{buildroot}$DirHoldingGdbPy + cp Tools/gdb/libpython.py %{buildroot}$PathOfGdbPy +%endif # with_gdb_hooks + + echo FINISHED: INSTALL OF PYTHON FOR CONFIGURATION: $ConfName +} + +# Use "InstallPython" to support building with different configurations: + +# Install the "debug" build first, so that we can move some files aside +%if 0%{?with_debug_build} +InstallPython debug \ + %{py_INSTSONAME_debug} \ + -O0 +%endif # with_debug_build + +# Now the optimized build: +InstallPython optimized \ + %{py_INSTSONAME_optimized} + +install -d -m 0755 ${RPM_BUILD_ROOT}%{pylibdir}/site-packages/__pycache__ + +mv ${RPM_BUILD_ROOT}%{_bindir}/2to3 ${RPM_BUILD_ROOT}%{_bindir}/python3-2to3 + +# add idle3 to menu +install -D -m 0644 Lib/idlelib/Icons/idle_16.png ${RPM_BUILD_ROOT}%{_datadir}/icons/hicolor/16x16/apps/idle3.png +install -D -m 0644 Lib/idlelib/Icons/idle_32.png ${RPM_BUILD_ROOT}%{_datadir}/icons/hicolor/32x32/apps/idle3.png +install -D -m 0644 Lib/idlelib/Icons/idle_48.png 
${RPM_BUILD_ROOT}%{_datadir}/icons/hicolor/48x48/apps/idle3.png
+desktop-file-install --dir=${RPM_BUILD_ROOT}%{_datadir}/applications %{SOURCE10}
+
+# Install and validate appdata file
+mkdir -p ${RPM_BUILD_ROOT}%{_datadir}/appdata
+cp -a %{SOURCE11} ${RPM_BUILD_ROOT}%{_datadir}/appdata
+appstream-util validate-relax --nonet ${RPM_BUILD_ROOT}%{_datadir}/appdata/idle3.appdata.xml
+
+# Development tools
+install -m755 -d ${RPM_BUILD_ROOT}%{pylibdir}/Tools
+install Tools/README ${RPM_BUILD_ROOT}%{pylibdir}/Tools/
+cp -ar Tools/freeze ${RPM_BUILD_ROOT}%{pylibdir}/Tools/
+cp -ar Tools/i18n ${RPM_BUILD_ROOT}%{pylibdir}/Tools/
+cp -ar Tools/pynche ${RPM_BUILD_ROOT}%{pylibdir}/Tools/
+cp -ar Tools/scripts ${RPM_BUILD_ROOT}%{pylibdir}/Tools/
+
+# Documentation tools
+install -m755 -d %{buildroot}%{pylibdir}/Doc
+cp -ar Doc/tools %{buildroot}%{pylibdir}/Doc/
+
+# Demo scripts
+cp -ar Tools/demo %{buildroot}%{pylibdir}/Tools/
+
+# Fix for bug #136654
+rm -f %{buildroot}%{pylibdir}/email/test/data/audiotest.au %{buildroot}%{pylibdir}/test/audiotest.au
+
+%if "%{_lib}" == "lib64"
+install -d -m 0755 %{buildroot}/%{_prefix}/lib/python%{pybasever}/site-packages/__pycache__
+%endif
+
+# Make python3-devel multilib-ready (bug #192747, #139911)
+%global _pyconfig32_h pyconfig-32.h
+%global _pyconfig64_h pyconfig-64.h
+
+%ifarch %{power64} s390x x86_64 ia64 alpha sparc64 aarch64 %{mips64} riscv64
+%global _pyconfig_h %{_pyconfig64_h}
+%else
+%global _pyconfig_h %{_pyconfig32_h}
+%endif
+
+# ABIFLAGS, LDVERSION and SOABI are in the upstream Makefile
+%global ABIFLAGS_optimized m
+%global ABIFLAGS_debug dm
+
+%global LDVERSION_optimized %{pybasever}%{ABIFLAGS_optimized}
+%global LDVERSION_debug %{pybasever}%{ABIFLAGS_debug}
+
+%global SOABI_optimized cpython-%{pyshortver}%{ABIFLAGS_optimized}-%{_arch}-linux%{_gnu}
+%global SOABI_debug cpython-%{pyshortver}%{ABIFLAGS_debug}-%{_arch}-linux%{_gnu}
+
+%if 0%{?with_debug_build}
+%global PyIncludeDirs python%{LDVERSION_optimized} python%{LDVERSION_debug}
+
+%else
+%global PyIncludeDirs python%{LDVERSION_optimized}
+%endif
+
+for PyIncludeDir in %{PyIncludeDirs} ; do
+ mv %{buildroot}%{_includedir}/$PyIncludeDir/pyconfig.h \
+ %{buildroot}%{_includedir}/$PyIncludeDir/%{_pyconfig_h}
+ cat > %{buildroot}%{_includedir}/$PyIncludeDir/pyconfig.h << EOF
+#include <bits/wordsize.h>
+
+#if __WORDSIZE == 32
+#include "%{_pyconfig32_h}"
+#elif __WORDSIZE == 64
+#include "%{_pyconfig64_h}"
+#else
+#error "Unknown word size"
+#endif
+EOF
+done
+
+# Fix for bug 201434: make sure distutils looks at the right pyconfig.h file
+# Similar for sysconfig: sysconfig.get_config_h_filename tries to locate
+# pyconfig.h so it can be parsed, and needs to do this at runtime in site.py
+# when python starts up (bug 653058)
+#
+# Split this out so it goes directly to the pyconfig-32.h/pyconfig-64.h
+# variants:
+sed -i -e "s/'pyconfig.h'/'%{_pyconfig_h}'/" \
+ %{buildroot}%{pylibdir}/distutils/sysconfig.py \
+ %{buildroot}%{pylibdir}/sysconfig.py
+
+# Switch all shebangs to refer to the specific Python version.
+LD_LIBRARY_PATH=./build/optimized ./build/optimized/python \
+ Tools/scripts/pathfix.py \
+ -i "%{_bindir}/python%{pybasever}" -pn \
+ %{buildroot} \
+ %{?with_gdb_hooks:%{buildroot}$DirHoldingGdbPy/*.py}
+
+# Remove shebang lines from .py files that aren't executable, and
+# remove executability from .py files that don't have a shebang line:
+find %{buildroot} -name \*.py \
+ \( \( \!
-perm /u+x,g+x,o+x -exec sed -e '/^#!/Q 0' -e 'Q 1' {} \; \ + -print -exec sed -i '1d' {} \; \) -o \( \ + -perm /u+x,g+x,o+x ! -exec grep -m 1 -q '^#!' {} \; \ + -exec chmod a-x {} \; \) \) + +# .xpm and .xbm files should not be executable: +find %{buildroot} \ + \( -name \*.xbm -o -name \*.xpm -o -name \*.xpm.1 \) \ + -exec chmod a-x {} \; + +# Remove executable flag from files that shouldn't have it: +chmod a-x \ + %{buildroot}%{pylibdir}/distutils/tests/Setup.sample \ + %{buildroot}%{pylibdir}/Tools/README + +# Get rid of DOS batch files: +find %{buildroot} -name \*.bat -exec rm {} \; + +# Get rid of backup files: +find %{buildroot}/ -name "*~" -exec rm -f {} \; +find . -name "*~" -exec rm -f {} \; +rm -f %{buildroot}%{pylibdir}/LICENSE.txt +# Junk, no point in putting in -test sub-pkg +rm -f ${RPM_BUILD_ROOT}/%{pylibdir}/idlelib/testcode.py* + +# Get rid of stray patch file from buildroot: +rm -f %{buildroot}%{pylibdir}/test/test_imp.py.apply-our-changes-to-expected-shebang # from patch 4 + +# Fix end-of-line encodings: +find %{buildroot}/ -name \*.py -exec sed -i 's/\r//' {} \; + +# Fix an encoding: +iconv -f iso8859-1 -t utf-8 %{buildroot}/%{pylibdir}/Demo/rpc/README > README.conv && mv -f README.conv %{buildroot}/%{pylibdir}/Demo/rpc/README + +# Note that +# %{pylibdir}/Demo/distutils/test2to3/setup.py +# is in iso-8859-1 encoding, and that this is deliberate; this is test data +# for the 2to3 tool, and one of the functions of the 2to3 tool is to fixup +# character encodings within python source code + +# Do bytecompilation with the newly installed interpreter. +# This is similar to the script in macros.pybytecompile +# compile *.pyc +find %{buildroot} -type f -a -name "*.py" -print0 | \ + LD_LIBRARY_PATH="%{buildroot}%{dynload_dir}/:%{buildroot}%{_libdir}" \ + PYTHONPATH="%{buildroot}%{_libdir}/python%{pybasever} %{buildroot}%{_libdir}/python%{pybasever}/site-packages" \ + xargs -0 %{buildroot}%{_bindir}/python%{pybasever} -O -c 'import py_compile, sys; [py_compile.compile(f, dfile=f.partition("%{buildroot}")[2], optimize=opt) for opt in range(3) for f in sys.argv[1:]]' || : + +# Fixup permissions for shared libraries from non-standard 555 to standard 755: +find %{buildroot} \ + -perm 555 -exec chmod 755 {} \; + +# Install macros for rpm: +mkdir -p %{buildroot}/%{_rpmconfigdir}/macros.d/ +install -m 644 %{SOURCE3} %{buildroot}/%{_rpmconfigdir}/macros.d/ +install -m 644 %{SOURCE9} %{buildroot}/%{_rpmconfigdir}/macros.d/ + +# Ensure that the curses module was linked against libncursesw.so, rather than +# libncurses.so (bug 539917) +ldd %{buildroot}/%{dynload_dir}/_curses*.so \ + | grep curses \ + | grep libncurses.so && (echo "_curses.so linked against libncurses.so" ; exit 1) + +# Ensure that the debug modules are linked against the debug libpython, and +# likewise for the optimized modules and libpython: +for Module in %{buildroot}/%{dynload_dir}/*.so ; do + case $Module in + *.%{SOABI_debug}) + ldd $Module | grep %{py_INSTSONAME_optimized} && + (echo Debug module $Module linked against optimized %{py_INSTSONAME_optimized} ; exit 1) + + ;; + *.%{SOABI_optimized}) + ldd $Module | grep %{py_INSTSONAME_debug} && + (echo Optimized module $Module linked against debug %{py_INSTSONAME_debug} ; exit 1) + ;; + esac +done + +# Create "/usr/bin/python3-debug", a symlink to the python3 debug binary, to +# avoid the user having to know the precise version and ABI flags. (see +# e.g. 
rhbz#676748): +%if 0%{?with_debug_build} +ln -s \ + %{_bindir}/python%{LDVERSION_debug} \ + %{buildroot}%{_bindir}/python3-debug +%endif + +# +# Systemtap hooks: +# +%if 0%{?with_systemtap} +# Install a tapset for this libpython into tapsetdir, fixing up the path to the +# library: +mkdir -p %{buildroot}%{tapsetdir} +%ifarch %{power64} s390x x86_64 ia64 alpha sparc64 aarch64 %{mips64} +%global libpython_stp_optimized libpython%{pybasever}-64.stp +%global libpython_stp_debug libpython%{pybasever}-debug-64.stp +%else +%global libpython_stp_optimized libpython%{pybasever}-32.stp +%global libpython_stp_debug libpython%{pybasever}-debug-32.stp +%endif + +sed \ + -e "s|LIBRARY_PATH|%{_libdir}/%{py_INSTSONAME_optimized}|" \ + %{_sourcedir}/libpython.stp \ + > %{buildroot}%{tapsetdir}/%{libpython_stp_optimized} + +%if 0%{?with_debug_build} +# In Python 3, python3 and python3-debug don't point to the same binary, +# so we have to replace "python3" with "python3-debug" to get systemtap +# working with debug build +sed \ + -e "s|LIBRARY_PATH|%{_libdir}/%{py_INSTSONAME_debug}|" \ + -e 's|"python3"|"python3-debug"|' \ + %{_sourcedir}/libpython.stp \ + > %{buildroot}%{tapsetdir}/%{libpython_stp_debug} +%endif # with_debug_build + +%endif # with_systemtap + +# Rename the -devel script that differs on different arches to arch specific name +mv %{buildroot}%{_bindir}/python%{LDVERSION_optimized}-{,`uname -m`-}config +echo -e '#!/bin/sh\nexec `dirname $0`/python%{LDVERSION_optimized}-`uname -m`-config "$@"' > \ + %{buildroot}%{_bindir}/python%{LDVERSION_optimized}-config +echo '[ $? -eq 127 ] && echo "Could not find python%{LDVERSION_optimized}-`uname -m`-config. Look around to see available arches." >&2' >> \ + %{buildroot}%{_bindir}/python%{LDVERSION_optimized}-config + chmod +x %{buildroot}%{_bindir}/python%{LDVERSION_optimized}-config + +# Rename the -debug script that differs on different arches to arch specific name +mv %{buildroot}%{_bindir}/python%{LDVERSION_debug}-{,`uname -m`-}config +echo -e '#!/bin/sh\nexec `dirname $0`/python%{LDVERSION_debug}-`uname -m`-config "$@"' > \ + %{buildroot}%{_bindir}/python%{LDVERSION_debug}-config +echo '[ $? -eq 127 ] && echo "Could not find python%{LDVERSION_debug}-`uname -m`-config. Look around to see available arches." >&2' >> \ + %{buildroot}%{_bindir}/python%{LDVERSION_debug}-config + chmod +x %{buildroot}%{_bindir}/python%{LDVERSION_debug}-config + +# System Python: Copy the executable to libexec +mkdir -p %{buildroot}%{_libexecdir} +cp %{buildroot}%{_bindir}/python%{pybasever} %{buildroot}%{_libexecdir}/system-python + +# ====================================================== +# Running the upstream test suite +# ====================================================== + +%check + +# first of all, check timestamps of bytecode files +find %{buildroot} -type f -a -name "*.py" -print0 | \ + LD_LIBRARY_PATH="%{buildroot}%{dynload_dir}/:%{buildroot}%{_libdir}" \ + PYTHONPATH="%{buildroot}%{_libdir}/python%{pybasever} %{buildroot}%{_libdir}/python%{pybasever}/site-packages" \ + xargs -0 %{buildroot}%{_bindir}/python%{pybasever} %{SOURCE8} + +# For ppc64 we need a larger stack than default (rhbz#1292462) +%ifarch %{power64} + ulimit -a + ulimit -s 16384 +%endif + +topdir=$(pwd) +CheckPython() { + ConfName=$1 + ConfDir=$(pwd)/build/$ConfName + + echo STARTING: CHECKING OF PYTHON FOR CONFIGURATION: $ConfName + + # Note that we're running the tests using the version of the code in the + # builddir, not in the buildroot. 
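+ #
+ # (Illustrative sketch only, not executed by this spec: for the "optimized"
+ # configuration the invocation below boils down to roughly
+ #
+ #   LD_LIBRARY_PATH=build/optimized build/optimized/python -m test.regrtest ...
+ #
+ # i.e. the freshly built interpreter running against the Lib/ tree of the
+ # unpacked sources, rather than the copy already installed under %{buildroot}.)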
+ + # Run the upstream test suite, setting "WITHIN_PYTHON_RPM_BUILD" so that the + # our non-standard decorators take effect on the relevant tests: + # @unittest._skipInRpmBuild(reason) + # @unittest._expectedFailureInRpmBuild + # test_faulthandler.test_register_chain currently fails on ppc64le and + # aarch64, see upstream bug http://bugs.python.org/issue21131 + WITHIN_PYTHON_RPM_BUILD= \ + LD_LIBRARY_PATH=$ConfDir $ConfDir/python -m test.regrtest \ + -wW --slowest --findleaks \ + -x test_distutils \ + %ifarch ppc64le aarch64 + -x test_faulthandler \ + %endif + %ifarch %{mips64} + -x test_ctypes \ + %endif + %ifarch %{power64} s390 s390x armv7hl aarch64 %{mips} + -x test_gdb + %endif + + echo FINISHED: CHECKING OF PYTHON FOR CONFIGURATION: $ConfName + +} + +%if 0%{run_selftest_suite} + +# Check each of the configurations: +%if 0%{?with_debug_build} +CheckPython debug +%endif # with_debug_build +CheckPython optimized + +%endif # run_selftest_suite + + +# ====================================================== +# Cleaning up +# ====================================================== + +%clean +rm -fr %{buildroot} + + +# ====================================================== +# Scriptlets +# ====================================================== + +%post libs -p /sbin/ldconfig + +%postun libs -p /sbin/ldconfig + +%post -n system-python-libs -p /sbin/ldconfig + +%postun -n system-python-libs -p /sbin/ldconfig + +%post +/bin/touch --no-create %{_datadir}/icons/hicolor &>/dev/null || : + +%postun +if [ $1 -eq 0 ] ; then + /bin/touch --no-create %{_datadir}/icons/hicolor &>/dev/null + /usr/bin/gtk-update-icon-cache %{_datadir}/icons/hicolor &>/dev/null || : +fi + +%posttrans +/usr/bin/gtk-update-icon-cache %{_datadir}/icons/hicolor &>/dev/null || : + +%files +%defattr(-, root, root) +%license LICENSE +%doc README.rst +%{_bindir}/pydoc* +%{_bindir}/python3 +%{_bindir}/python%{pybasever} +%{_bindir}/python%{pybasever}m +%{_bindir}/pyvenv +%{_bindir}/pyvenv-%{pybasever} +%{_mandir}/*/* + +%files libs +%defattr(-,root,root,-) +%license LICENSE +%doc README.rst + +%{pylibdir}/lib2to3 +%exclude %{pylibdir}/lib2to3/tests + +%dir %{pylibdir}/unittest/ +%dir %{pylibdir}/unittest/__pycache__/ +%{pylibdir}/unittest/*.py +%{pylibdir}/unittest/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/asyncio/ +%dir %{pylibdir}/asyncio/__pycache__/ +%{pylibdir}/asyncio/*.py +%{pylibdir}/asyncio/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/venv/ +%dir %{pylibdir}/venv/__pycache__/ +%{pylibdir}/venv/*.py +%{pylibdir}/venv/__pycache__/*%{bytecode_suffixes} +%{pylibdir}/venv/scripts + +%{pylibdir}/wsgiref +%{pylibdir}/xmlrpc + +%dir %{pylibdir}/ensurepip/ +%dir %{pylibdir}/ensurepip/__pycache__/ +%{pylibdir}/ensurepip/*.py +%{pylibdir}/ensurepip/__pycache__/*%{bytecode_suffixes} +%exclude %{pylibdir}/ensurepip/_bundled + +%if 0%{?with_rewheel} +%dir %{pylibdir}/ensurepip/rewheel/ +%dir %{pylibdir}/ensurepip/rewheel/__pycache__/ +%{pylibdir}/ensurepip/rewheel/*.py +%{pylibdir}/ensurepip/rewheel/__pycache__/*%{bytecode_suffixes} +%endif + +%{pylibdir}/idlelib + +%dir %{pylibdir}/test/ +%dir %{pylibdir}/test/__pycache__/ +%dir %{pylibdir}/test/support/ +%dir %{pylibdir}/test/support/__pycache__/ +%{pylibdir}/test/__init__.py +%{pylibdir}/test/__pycache__/__init__%{bytecode_suffixes} +%{pylibdir}/test/support/__init__.py +%{pylibdir}/test/support/__pycache__/__init__%{bytecode_suffixes} + +%dir %{pylibdir}/concurrent/ +%dir %{pylibdir}/concurrent/__pycache__/ +%{pylibdir}/concurrent/*.py 
+%{pylibdir}/concurrent/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/concurrent/futures/ +%dir %{pylibdir}/concurrent/futures/__pycache__/ +%{pylibdir}/concurrent/futures/*.py +%{pylibdir}/concurrent/futures/__pycache__/*%{bytecode_suffixes} + +%{pylibdir}/pydoc_data + +################################################################################## + +%files -n system-python +%defattr(-,root,root,-) +%license LICENSE +%doc README.rst +%{_libexecdir}/system-python + +%files -n system-python-libs +%defattr(-,root,root,-) +%license LICENSE +%doc README.rst +%dir %{pylibdir} +%dir %{dynload_dir} + +%{dynload_dir}/_blake2.%{SOABI_optimized}.so +%{dynload_dir}/_md5.%{SOABI_optimized}.so +%{dynload_dir}/_sha1.%{SOABI_optimized}.so +%{dynload_dir}/_sha256.%{SOABI_optimized}.so +%{dynload_dir}/_sha3.%{SOABI_optimized}.so +%{dynload_dir}/_sha512.%{SOABI_optimized}.so + +%{dynload_dir}/_asyncio.%{SOABI_optimized}.so +%{dynload_dir}/_bisect.%{SOABI_optimized}.so +%{dynload_dir}/_bz2.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_cn.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_hk.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_iso2022.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_jp.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_kr.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_tw.%{SOABI_optimized}.so +%{dynload_dir}/_crypt.%{SOABI_optimized}.so +%{dynload_dir}/_csv.%{SOABI_optimized}.so +%{dynload_dir}/_ctypes.%{SOABI_optimized}.so +%{dynload_dir}/_curses.%{SOABI_optimized}.so +%{dynload_dir}/_curses_panel.%{SOABI_optimized}.so +%{dynload_dir}/_dbm.%{SOABI_optimized}.so +%{dynload_dir}/_decimal.%{SOABI_optimized}.so +%{dynload_dir}/_elementtree.%{SOABI_optimized}.so +%if %{with_gdbm} +%{dynload_dir}/_gdbm.%{SOABI_optimized}.so +%endif +%{dynload_dir}/_hashlib.%{SOABI_optimized}.so +%{dynload_dir}/_heapq.%{SOABI_optimized}.so +%{dynload_dir}/_json.%{SOABI_optimized}.so +%{dynload_dir}/_lsprof.%{SOABI_optimized}.so +%{dynload_dir}/_lzma.%{SOABI_optimized}.so +%{dynload_dir}/_multibytecodec.%{SOABI_optimized}.so +%{dynload_dir}/_multiprocessing.%{SOABI_optimized}.so +%{dynload_dir}/_opcode.%{SOABI_optimized}.so +%{dynload_dir}/_pickle.%{SOABI_optimized}.so +%{dynload_dir}/_posixsubprocess.%{SOABI_optimized}.so +%{dynload_dir}/_random.%{SOABI_optimized}.so +%{dynload_dir}/_socket.%{SOABI_optimized}.so +%{dynload_dir}/_sqlite3.%{SOABI_optimized}.so +%{dynload_dir}/_ssl.%{SOABI_optimized}.so +%{dynload_dir}/_struct.%{SOABI_optimized}.so +%{dynload_dir}/array.%{SOABI_optimized}.so +%{dynload_dir}/audioop.%{SOABI_optimized}.so +%{dynload_dir}/binascii.%{SOABI_optimized}.so +%{dynload_dir}/cmath.%{SOABI_optimized}.so +%{dynload_dir}/_datetime.%{SOABI_optimized}.so +%{dynload_dir}/fcntl.%{SOABI_optimized}.so +%{dynload_dir}/grp.%{SOABI_optimized}.so +%{dynload_dir}/math.%{SOABI_optimized}.so +%{dynload_dir}/mmap.%{SOABI_optimized}.so +%{dynload_dir}/nis.%{SOABI_optimized}.so +%{dynload_dir}/ossaudiodev.%{SOABI_optimized}.so +%{dynload_dir}/parser.%{SOABI_optimized}.so +%{dynload_dir}/pyexpat.%{SOABI_optimized}.so +%{dynload_dir}/readline.%{SOABI_optimized}.so +%{dynload_dir}/resource.%{SOABI_optimized}.so +%{dynload_dir}/select.%{SOABI_optimized}.so +%{dynload_dir}/spwd.%{SOABI_optimized}.so +%{dynload_dir}/syslog.%{SOABI_optimized}.so +%{dynload_dir}/termios.%{SOABI_optimized}.so +#%{dynload_dir}/time.%{SOABI_optimized}.so +%{dynload_dir}/_testmultiphase.%{SOABI_optimized}.so +%{dynload_dir}/unicodedata.%{SOABI_optimized}.so +%{dynload_dir}/xxlimited.%{SOABI_optimized}.so 
+%{dynload_dir}/zlib.%{SOABI_optimized}.so + +%dir %{pylibdir}/site-packages/ +%dir %{pylibdir}/site-packages/__pycache__/ +%{pylibdir}/site-packages/README.txt +%{pylibdir}/*.py +%dir %{pylibdir}/__pycache__/ +%{pylibdir}/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/collections/ +%dir %{pylibdir}/collections/__pycache__/ +%{pylibdir}/collections/*.py +%{pylibdir}/collections/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/ctypes/ +%dir %{pylibdir}/ctypes/__pycache__/ +%{pylibdir}/ctypes/*.py +%{pylibdir}/ctypes/__pycache__/*%{bytecode_suffixes} +%{pylibdir}/ctypes/macholib + +%{pylibdir}/curses + +%dir %{pylibdir}/dbm/ +%dir %{pylibdir}/dbm/__pycache__/ +%{pylibdir}/dbm/*.py +%{pylibdir}/dbm/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/distutils/ +%dir %{pylibdir}/distutils/__pycache__/ +%{pylibdir}/distutils/*.py +%{pylibdir}/distutils/__pycache__/*%{bytecode_suffixes} +%{pylibdir}/distutils/README +%{pylibdir}/distutils/command +%exclude %{pylibdir}/distutils/command/wininst-*.exe + +%dir %{pylibdir}/email/ +%dir %{pylibdir}/email/__pycache__/ +%{pylibdir}/email/*.py +%{pylibdir}/email/__pycache__/*%{bytecode_suffixes} +%{pylibdir}/email/mime +%doc %{pylibdir}/email/architecture.rst + +%{pylibdir}/encodings + +%{pylibdir}/html +%{pylibdir}/http + +%dir %{pylibdir}/importlib/ +%dir %{pylibdir}/importlib/__pycache__/ +%{pylibdir}/importlib/*.py +%{pylibdir}/importlib/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/json/ +%dir %{pylibdir}/json/__pycache__/ +%{pylibdir}/json/*.py +%{pylibdir}/json/__pycache__/*%{bytecode_suffixes} + +%{pylibdir}/logging +%{pylibdir}/multiprocessing + +%dir %{pylibdir}/sqlite3/ +%dir %{pylibdir}/sqlite3/__pycache__/ +%{pylibdir}/sqlite3/*.py +%{pylibdir}/sqlite3/__pycache__/*%{bytecode_suffixes} + +%exclude %{pylibdir}/turtle.py +%exclude %{pylibdir}/__pycache__/turtle*%{bytecode_suffixes} + +%{pylibdir}/urllib +%{pylibdir}/xml + +%if "%{_lib}" == "lib64" +%attr(0755,root,root) %dir %{_prefix}/lib/python%{pybasever} +%attr(0755,root,root) %dir %{_prefix}/lib/python%{pybasever}/site-packages +%attr(0755,root,root) %dir %{_prefix}/lib/python%{pybasever}/site-packages/__pycache__/ +%endif + +# "Makefile" and the config-32/64.h file are needed by +# distutils/sysconfig.py:_init_posix(), so we include them in the core +# package, along with their parent directories (bug 531901): +%dir %{pylibdir}/config-%{LDVERSION_optimized}-%{_arch}-linux%{_gnu}/ +%{pylibdir}/config-%{LDVERSION_optimized}-%{_arch}-linux%{_gnu}/Makefile +%dir %{_includedir}/python%{LDVERSION_optimized}/ +%{_includedir}/python%{LDVERSION_optimized}/%{_pyconfig_h} + +%{_libdir}/%{py_INSTSONAME_optimized} +%{_libdir}/libpython3.so +%if 0%{?with_systemtap} +%dir %(dirname %{tapsetdir}) +%dir %{tapsetdir} +%{tapsetdir}/%{libpython_stp_optimized} +%doc systemtap-example.stp pyfuntop.stp +%endif + +%files devel +%defattr(-,root,root) +%{pylibdir}/config-%{LDVERSION_optimized}-%{_arch}-linux%{_gnu}/* +%exclude %{pylibdir}/config-%{LDVERSION_optimized}-%{_arch}-linux%{_gnu}/Makefile +%{pylibdir}/distutils/command/wininst-*.exe +%{_includedir}/python%{LDVERSION_optimized}/*.h +%exclude %{_includedir}/python%{LDVERSION_optimized}/%{_pyconfig_h} +%doc Misc/README.valgrind Misc/valgrind-python.supp Misc/gdbinit +%{_bindir}/python3-config +%{_bindir}/python%{pybasever}-config +%{_bindir}/python%{LDVERSION_optimized}-config +%{_bindir}/python%{LDVERSION_optimized}-*-config +%{_libdir}/libpython%{LDVERSION_optimized}.so +%{_libdir}/pkgconfig/python-%{LDVERSION_optimized}.pc 
+%{_libdir}/pkgconfig/python-%{pybasever}.pc +%{_libdir}/pkgconfig/python3.pc +%{_rpmconfigdir}/macros.d/macros.pybytecompile%{pybasever} +%{_rpmconfigdir}/macros.d/macros.systempython + +%files tools +%defattr(-,root,root,755) +%{_bindir}/python3-2to3 +%{_bindir}/2to3-%{pybasever} +%{_bindir}/idle* +%{pylibdir}/Tools +%doc %{pylibdir}/Doc +%{_datadir}/appdata/idle3.appdata.xml +%{_datadir}/applications/idle3.desktop +%{_datadir}/icons/hicolor/*/apps/idle3.* + +%files tkinter +%defattr(-,root,root,755) +%{pylibdir}/tkinter +%exclude %{pylibdir}/tkinter/test +%{dynload_dir}/_tkinter.%{SOABI_optimized}.so +%{pylibdir}/turtle.py +%{pylibdir}/__pycache__/turtle*%{bytecode_suffixes} +%dir %{pylibdir}/turtledemo +%{pylibdir}/turtledemo/*.py +%{pylibdir}/turtledemo/*.cfg +%dir %{pylibdir}/turtledemo/__pycache__/ +%{pylibdir}/turtledemo/__pycache__/*%{bytecode_suffixes} + +%files test +%defattr(-, root, root) +%{pylibdir}/ctypes/test +%{pylibdir}/distutils/tests +%{pylibdir}/sqlite3/test +%{pylibdir}/test +%{dynload_dir}/_ctypes_test.%{SOABI_optimized}.so +%{dynload_dir}/_testbuffer.%{SOABI_optimized}.so +%{dynload_dir}/_testcapi.%{SOABI_optimized}.so +%{dynload_dir}/_testimportmultiple.%{SOABI_optimized}.so +%{pylibdir}/lib2to3/tests +%{pylibdir}/tkinter/test +%{pylibdir}/unittest/test + + +# We don't bother splitting the debug build out into further subpackages: +# if you need it, you're probably a developer. + +# Hence the manifest is the combination of analogous files in the manifests of +# all of the other subpackages + +%if 0%{?with_debug_build} +%files debug +%defattr(-,root,root,-) + +# Analog of the core subpackage's files: +%{_bindir}/python%{LDVERSION_debug} +%{_bindir}/python3-debug + +# Analog of the -libs subpackage's files: +# ...with debug builds of the built-in "extension" modules: + +%{dynload_dir}/_blake2.%{SOABI_debug}.so +%{dynload_dir}/_md5.%{SOABI_debug}.so +%{dynload_dir}/_sha1.%{SOABI_debug}.so +%{dynload_dir}/_sha256.%{SOABI_debug}.so +%{dynload_dir}/_sha3.%{SOABI_debug}.so +%{dynload_dir}/_sha512.%{SOABI_debug}.so + +%{dynload_dir}/_asyncio.%{SOABI_debug}.so +%{dynload_dir}/_bisect.%{SOABI_debug}.so +%{dynload_dir}/_bz2.%{SOABI_debug}.so +%{dynload_dir}/_codecs_cn.%{SOABI_debug}.so +%{dynload_dir}/_codecs_hk.%{SOABI_debug}.so +%{dynload_dir}/_codecs_iso2022.%{SOABI_debug}.so +%{dynload_dir}/_codecs_jp.%{SOABI_debug}.so +%{dynload_dir}/_codecs_kr.%{SOABI_debug}.so +%{dynload_dir}/_codecs_tw.%{SOABI_debug}.so +%{dynload_dir}/_crypt.%{SOABI_debug}.so +%{dynload_dir}/_csv.%{SOABI_debug}.so +%{dynload_dir}/_ctypes.%{SOABI_debug}.so +%{dynload_dir}/_curses.%{SOABI_debug}.so +%{dynload_dir}/_curses_panel.%{SOABI_debug}.so +%{dynload_dir}/_dbm.%{SOABI_debug}.so +%{dynload_dir}/_decimal.%{SOABI_debug}.so +%{dynload_dir}/_elementtree.%{SOABI_debug}.so +%if %{with_gdbm} +%{dynload_dir}/_gdbm.%{SOABI_debug}.so +%endif +%{dynload_dir}/_hashlib.%{SOABI_debug}.so +%{dynload_dir}/_heapq.%{SOABI_debug}.so +%{dynload_dir}/_json.%{SOABI_debug}.so +%{dynload_dir}/_lsprof.%{SOABI_debug}.so +%{dynload_dir}/_lzma.%{SOABI_debug}.so +%{dynload_dir}/_multibytecodec.%{SOABI_debug}.so +%{dynload_dir}/_multiprocessing.%{SOABI_debug}.so +%{dynload_dir}/_opcode.%{SOABI_debug}.so +%{dynload_dir}/_pickle.%{SOABI_debug}.so +%{dynload_dir}/_posixsubprocess.%{SOABI_debug}.so +%{dynload_dir}/_random.%{SOABI_debug}.so +%{dynload_dir}/_socket.%{SOABI_debug}.so +%{dynload_dir}/_sqlite3.%{SOABI_debug}.so +%{dynload_dir}/_ssl.%{SOABI_debug}.so +%{dynload_dir}/_struct.%{SOABI_debug}.so 
+%{dynload_dir}/array.%{SOABI_debug}.so +%{dynload_dir}/audioop.%{SOABI_debug}.so +%{dynload_dir}/binascii.%{SOABI_debug}.so +%{dynload_dir}/cmath.%{SOABI_debug}.so +%{dynload_dir}/_datetime.%{SOABI_debug}.so +%{dynload_dir}/fcntl.%{SOABI_debug}.so +%{dynload_dir}/grp.%{SOABI_debug}.so +%{dynload_dir}/math.%{SOABI_debug}.so +%{dynload_dir}/mmap.%{SOABI_debug}.so +%{dynload_dir}/nis.%{SOABI_debug}.so +%{dynload_dir}/ossaudiodev.%{SOABI_debug}.so +%{dynload_dir}/parser.%{SOABI_debug}.so +%{dynload_dir}/pyexpat.%{SOABI_debug}.so +%{dynload_dir}/readline.%{SOABI_debug}.so +%{dynload_dir}/resource.%{SOABI_debug}.so +%{dynload_dir}/select.%{SOABI_debug}.so +%{dynload_dir}/spwd.%{SOABI_debug}.so +%{dynload_dir}/syslog.%{SOABI_debug}.so +%{dynload_dir}/termios.%{SOABI_debug}.so +#%{dynload_dir}/time.%{SOABI_debug}.so +%{dynload_dir}/_testmultiphase.%{SOABI_debug}.so +%{dynload_dir}/unicodedata.%{SOABI_debug}.so +%{dynload_dir}/zlib.%{SOABI_debug}.so + +# No need to split things out the "Makefile" and the config-32/64.h file as we +# do for the regular build above (bug 531901), since they're all in one package +# now; they're listed below, under "-devel": + +%{_libdir}/%{py_INSTSONAME_debug} +%if 0%{?with_systemtap} +%dir %(dirname %{tapsetdir}) +%dir %{tapsetdir} +%{tapsetdir}/%{libpython_stp_debug} +%endif + +# Analog of the -devel subpackage's files: +%{pylibdir}/config-%{LDVERSION_debug}-%{_arch}-linux%{_gnu} +%{_includedir}/python%{LDVERSION_debug} +%{_bindir}/python%{LDVERSION_debug}-config +%{_bindir}/python%{LDVERSION_debug}-*-config +%{_libdir}/libpython%{LDVERSION_debug}.so +%{_libdir}/libpython%{LDVERSION_debug}.so.1.0 +%{_libdir}/pkgconfig/python-%{LDVERSION_debug}.pc + +# Analog of the -tools subpackage's files: +# None for now; we could build precanned versions that have the appropriate +# shebang if needed + +# Analog of the tkinter subpackage's files: +%{dynload_dir}/_tkinter.%{SOABI_debug}.so + +# Analog of the -test subpackage's files: +%{dynload_dir}/_ctypes_test.%{SOABI_debug}.so +%{dynload_dir}/_testbuffer.%{SOABI_debug}.so +%{dynload_dir}/_testcapi.%{SOABI_debug}.so +%{dynload_dir}/_testimportmultiple.%{SOABI_debug}.so + +%endif # with_debug_build + +# We put the debug-gdb.py file inside /usr/lib/debug to avoid noise from +# ldconfig (rhbz:562980). 
+# +# The /usr/lib/rpm/redhat/macros defines %__debug_package to use +# debugfiles.list, and it appears that everything below /usr/lib/debug and +# (/usr/src/debug) gets added to this file (via LISTFILES) in +# /usr/lib/rpm/find-debuginfo.sh +# +# Hence by installing it below /usr/lib/debug we ensure it is added to the +# -debuginfo subpackage +# (if it doesn't, then the rpmbuild ought to fail since the debug-gdb.py +# payload file would be unpackaged) + + +# ====================================================== +# Finally, the changelog: +# ====================================================== + +%changelog +* Thu Mar 29 2018 Charalampos Stratakis - 3.6.5-1 +- Update to 3.6.5 + +* Sat Mar 24 2018 Miro Hrončok - 3.6.4-6 +- Fix broken macro invocation and broken building of C Python extensions +Resolves: rhbz#1560103 + +* Fri Mar 16 2018 Miro Hrončok - 3.6.4-5 +- Add -n option for pathfix.py +Resolves: rhbz#1546990 + +* Thu Mar 15 2018 Miro Hrončok - 3.6.4-4 +- Fix the py_byte_compile macro to work on Python 2 +- Remove the pybytecompile macro file from the flat package +Resolves: rhbz#1484993 + +* Tue Mar 13 2018 Charalampos Stratakis - 3.6.4-3 +- Do not send IP addresses in SNI TLS extension + +* Tue Jan 23 2018 Charalampos Stratakis - 3.6.4-2 +- Restore the PyExc_RecursionErrorInst public symbol + +* Mon Jan 15 2018 Charalampos Stratakis - 3.6.4-1 +- Update to version 3.6.4 + +* Mon Oct 09 2017 Charalampos Stratakis - 3.6.3-2 +- Fix memory corruption due to allocator mix +Resolves: rhbz#1498207 + +* Fri Oct 06 2017 Charalampos Stratakis - 3.6.3-1 +- Update to Python 3.6.3 + +* Fri Sep 29 2017 Miro Hrončok - 3.6.2-8 +- Make the -devel package require redhat-rpm-config +Resolves: rhbz#1496757 + +* Wed Sep 06 2017 Iryna Shcherbina - 3.6.2-7 +- Include `-g` in the flags sent to the linker (LDFLAGS) +Resolves: rhbz#1483222 + +* Mon Aug 28 2017 Michal Cyprian - 3.6.2-6 +- Use python3 style of calling super() without arguments in rpath + patch to prevent recursion in UnixCCompiler subclasses +Resolves: rhbz#1458122 + +* Fri Aug 11 2017 Michal Cyprian - 3.6.2-5 +- Revert "Add --executable option to install.py command" + This enhancement is currently not needed and it can possibly + collide with `pip --editable`option + +* Mon Aug 07 2017 Iryna Shcherbina - 3.6.2-4 +- Fix the "urllib FTP protocol stream injection" vulnerability +Resolves: rhbz#1478916 + +* Tue Aug 01 2017 Tomas Orsava - 3.6.2-3 +- Dropped BuildRequires on db4-devel which was useful for Python 2 (module + bsddb), however, no longer needod for Python 3 +- Tested building Python 3 with and without the dependency, all tests pass and + filelists of resulting RPMs are identical + +* Tue Jul 25 2017 Charalampos Stratakis - 3.6.2-2 +- Make test_asyncio to not depend on the current SIGHUP signal handler. 
+ +* Tue Jul 18 2017 Charalampos Stratakis - 3.6.2-1 +- Update to Python 3.6.2 + +* Tue Jun 27 2017 Charalampos Stratakis - 3.6.1-8 +- Update to the latest upstream implementation of PEP 538 + +* Fri Jun 23 2017 Charalampos Stratakis - 3.6.1-7 +- Fix test_alpn_protocols from test_ssl +- Do not require rebundled setuptools dependencies + +* Tue May 09 2017 Charalampos Stratakis - 3.6.1-6 +- Enable profile guided optimizations for x86_64 and i686 architectures +- Update to a newer implementation of PEP 538 +- Update description to reflect that Python 3 is now the default Python + +* Fri May 05 2017 Charalampos Stratakis - 3.6.1-5 +- Update PEP 538 to the latest upstream implementation + +* Tue Apr 18 2017 Charalampos Stratakis - 3.6.1-4 +- Enable link time optimizations +- Move windows executables to the devel subpackage (rhbz#1426257) + +* Thu Apr 13 2017 Tomas Orsava - 3.6.1-3 +- Rename python3.Xdm-config script from -debug to be arch specific +Resolves: rhbz#1179073 + +* Wed Apr 05 2017 Charalampos Stratakis - 3.6.1-2 +- Install the Makefile in its proper location (rhbz#1438219) + +* Mon Apr 03 2017 Iryna Shcherbina - 3.6.1-1 +- Update to version 3.6.1 final + +* Mon Apr 03 2017 Iryna Shcherbina - 3.6.1-0.1.rc1 +- Update to Python 3.6.1 release candidate 1 +- Add patch 264 to skip a known test failure on aarch64 + +* Tue Mar 21 2017 Tomas Orsava - 3.6.0-22 +- Fix syntax error in %%py_byte_compile macro (rhbz#1433569) + +* Fri Mar 10 2017 Charalampos Stratakis - 3.6.0-21 +- Use proper command line parsing in _testembed +- Backport of PEP 538: Coercing the legacy C locale to a UTF-8 based locale + https://fedoraproject.org/wiki/Changes/python3_c.utf-8_locale + +* Mon Feb 27 2017 Charalampos Stratakis - 3.6.0-20 +- Add desktop entry and appdata.xml file for IDLE 3 (rhbz#1392049) + +* Fri Feb 24 2017 Michal Cyprian - 3.6.0-19 +- Revert "Set values of prefix and exec_prefix to /usr/local for + /usr/bin/python* executables..." 
to prevent build failures + of packages using alternate build tools + +* Tue Feb 21 2017 Michal Cyprian - 3.6.0-18 +- Set values of prefix and exec_prefix to /usr/local for + /usr/bin/python* executables +- Use new %%_module_build macro + +* Fri Feb 17 2017 Michal Cyprian - 3.6.0-13 +- Add --executable option to install.py command + +* Wed Feb 15 2017 Charalampos Stratakis - 3.6.0-12 +- BuildRequire the new dependencies of setuptools when rewheel mode is enabled +in order for the virtualenvs to work properly + +* Sat Feb 11 2017 Fedora Release Engineering - 3.6.0-11 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_26_Mass_Rebuild + +* Wed Feb 01 2017 Stephen Gallagher - 3.6.0-10 +- Add missing %%license macro + +* Thu Jan 26 2017 Tomas Orsava - 3.6.0-9 +- Modify the runtime dependency of python3-libs on system-python-libs again, + because previous attempt didn't work properly with dnf resolving mechanism + +* Wed Jan 25 2017 Tomas Orsava - 3.6.0-8 +- Modify the runtime dependency of python3-libs on system-python-libs to use + just the version and release number, but not the dist tag due to Modularity + +* Mon Jan 16 2017 Charalampos Stratakis - 3.6.0-7 +- Fix error check, so that Random.seed actually uses OS randomness (rhbz#1412275) +- Skip test_aead_aes_gcm during rpmbuild + +* Thu Jan 12 2017 Igor Gnatenko - 3.6.0-6 +- Rebuild for readline 7.x + +* Tue Jan 10 2017 Charalampos Stratakis - 3.6.0-5 +- Require glibc >= 2.24.90-26 for system-python-libs (rhbz#1410644) + +* Mon Jan 09 2017 Charalampos Stratakis - 3.6.0-4 +- Define HAVE_LONG_LONG as 1 for backwards compatibility + +* Thu Jan 05 2017 Miro Hrončok - 3.6.0-3 +- Don't blow up on EL7 kernel (random generator) (rhbz#1410175) + +* Tue Dec 27 2016 Charalampos Stratakis - 3.6.0-1 +- Update to Python 3.6.0 final + +* Fri Dec 09 2016 Charalampos Stratakis - 3.6.0-0.6.rc1 +- Enable rewheel + +* Wed Dec 07 2016 Charalampos Stratakis - 3.6.0-0.5.rc1 +- Update to Python 3.6.0 release candidate 1 + +* Mon Dec 05 2016 Charalampos Stratakis - 3.6.0-0.4.b4 +- Update to Python 3.6.0 beta 4 + +* Mon Dec 05 2016 Charalampos Stratakis - 3.5.2-7 +- Set to work with pip version 9.0.1 + +* Wed Oct 12 2016 Charalampos Stratakis - 3.5.2-6 +- Use proper patch numbering and base upstream branch for +porting ssl and hashlib modules to OpenSSL 1.1.0 +- Drop hashlib patch for now +- Add riscv64 arch to 64bit and no-valgrind arches + +* Tue Oct 11 2016 Tomáš Mráz - 3.5.2-5 +- Make it build with OpenSSL-1.1.0 based on upstream patch + +* Wed Sep 14 2016 Charalampos Stratakis - 3.5.2-4 +- Obsolete and Provide python35 package + +* Mon Sep 12 2016 Charalampos Stratakis - 3.5.2-3 +- Update %py_byte_compile macro +- Remove unused configure flags (rhbz#1374357) + +* Fri Sep 09 2016 Tomas Orsava - 3.5.2-2 +- Updated .pyc 'bytecompilation with the newly installed interpreter' to also + recompile optimized .pyc files +- Removed .pyo 'bytecompilation with the newly installed interpreter', as .pyo + files are no more +- Resolves rhbz#1373635 + +* Mon Aug 15 2016 Tomas Orsava - 3.5.2-1 +- Rebased to version 3.5.2 +- Set to work with pip version 8.1.2 +- Removed patches 207, 237, 241 as fixes are already contained in Python 3.5.2 +- Removed arch or environment specific patches 194, 196, 203, and 208 + as test builds indicate they are no longer needed +- Updated patches 102, 146, and 242 to work with the new Python codebase +- Removed patches 200, 201, 5000 which weren't even being applied + +* Tue Aug 09 2016 Charalampos Stratakis - 3.5.1-15 +- Fix for CVE-2016-1000110 
HTTPoxy attack +- SPEC file cleanup + +* Mon Aug 01 2016 Michal Toman - 3.5.1-14 +- Build properly on MIPS + +* Tue Jul 19 2016 Fedora Release Engineering - 3.5.1-13 +- https://fedoraproject.org/wiki/Changes/Automatic_Provides_for_Python_RPM_Packages + +* Fri Jul 08 2016 Charalampos Stratakis - 3.5.1-12 +- Refactor patch for properly fixing CVE-2016-5636 + +* Fri Jul 08 2016 Charalampos Stratakis - 3.5.1-11 +- Fix test_pyexpat failure with Expat version of 2.2.0 + +* Fri Jul 08 2016 Miro Hrončok - 3.5.1-10 +- Move xml module to system-python-libs + +* Thu Jun 16 2016 Tomas Orsava - 3.5.1-9 +- Fix for: CVE-2016-0772 python: smtplib StartTLS stripping attack +- Raise an error when STARTTLS fails +- rhbz#1303647: https://bugzilla.redhat.com/show_bug.cgi?id=1303647 +- rhbz#1346345: https://bugzilla.redhat.com/show_bug.cgi?id=1346345 +- Fixed upstream: https://hg.python.org/cpython/rev/d590114c2394 + +* Mon Jun 13 2016 Charalampos Stratakis - 3.5.1-8 +- Added patch for fixing possible integer overflow and heap corruption in zipimporter.get_data() + +* Fri Mar 04 2016 Miro Hrončok - 3.5.1-7 +- Move distutils to system-python-libs + +* Wed Feb 24 2016 Robert Kuska - 3.5.1-6 +- Provide python3-enum34 + +* Fri Feb 19 2016 Miro Hrončok - 3.5.1-5 +- Provide System Python packages and macros + +* Thu Feb 04 2016 Fedora Release Engineering - 3.5.1-4 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_24_Mass_Rebuild + +* Wed Jan 13 2016 Orion Poplwski - 3.5.1-2 +- Drop python3 macros, require python/python3-rpm-macros + +* Mon Dec 14 2015 Robert Kuska - 3.5.1-1 +- Update to 3.5.1 +- Removed patch 199 and 207 (upstream) + +* Sun Nov 15 2015 Robert Kuska - 3.5.0-5 +- Remove versioned libpython from devel package + +* Fri Nov 13 2015 Than Ngo 3.5.0-4 +- add correct arch for ppc64/ppc64le to fix build failure + +* Wed Nov 11 2015 Robert Kuska - 3.5.0-3 +- Hide the private _Py_atomic_xxx symbols from public header + +* Wed Oct 14 2015 Robert Kuska - 3.5.0-2 +- Rebuild with wheel set to 1 + +* Tue Sep 15 2015 Matej Stuchlik - 3.5.0-1 +- Update to 3.5.0 + +* Mon Jun 29 2015 Thomas Spura - 3.4.3-4 +- python3-devel: Require python-macros for version independant macros such as + python_provide. See fpc#281 and fpc#534. 
+ +* Thu Jun 18 2015 Fedora Release Engineering - 3.4.3-3 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_23_Mass_Rebuild + +* Wed Jun 17 2015 Matej Stuchlik - 3.4.3-4 +- Use 1024bit DH key in test_ssl +- Use -O0 when compiling -debug build +- Update pip version variable to the version we actually ship + +* Wed Jun 17 2015 Matej Stuchlik - 3.4.3-3 +- Make relocating Python by changing _prefix actually work +Resolves: rhbz#1231801 + +* Mon May 4 2015 Peter Robinson 3.4.3-2 +- Disable test_gdb on aarch64 (rhbz#1196181), it joins all other non x86 arches + +* Thu Mar 12 2015 Matej Stuchlik - 3.4.3-1 +- Updated to 3.4.3 +- BuildPython now accepts additional build options +- Temporarily disabled test_gdb on arm (rhbz#1196181) + +* Wed Feb 25 2015 Matej Stuchlik - 3.4.2-7 +- Fixed undefined behaviour in faulthandler which caused test to hang on x86_64 + (http://bugs.python.org/issue23433) + +* Sat Feb 21 2015 Till Maas - 3.4.2-6 +- Rebuilt for Fedora 23 Change + https://fedoraproject.org/wiki/Changes/Harden_all_packages_with_position-independent_code + +* Tue Feb 17 2015 Ville Skyttä - 3.4.2-5 +- Own systemtap dirs (#710733) + +* Mon Jan 12 2015 Dan Horák - 3.4.2-4 +- build with valgrind on ppc64le +- disable test_gdb on s390(x) until rhbz#1181034 is resolved + +* Tue Dec 16 2014 Robert Kuska - 3.4.2-3 +- New patches: 170 (gc asserts), 200 (gettext headers), + 201 (gdbm memory leak) + +* Thu Dec 11 2014 Robert Kuska - 3.4.2-2 +- OpenSSL disabled SSLv3 in SSLv23 method + +* Thu Nov 13 2014 Matej Stuchlik - 3.4.2-1 +- Update to 3.4.2 +- Refreshed patches: 156 (gdb autoload) +- Removed: 195 (Werror declaration), 197 (CVE-2014-4650) + +* Mon Nov 03 2014 Slavek Kabrda - 3.4.1-16 +- Fix CVE-2014-4650 - CGIHTTPServer URL handling +Resolves: rhbz#1113529 + +* Sun Sep 07 2014 Karsten Hopp 3.4.1-15 +- exclude test_gdb on ppc* (rhbz#1132488) + +* Thu Aug 21 2014 Slavek Kabrda - 3.4.1-14 +- Update rewheel patch with fix from https://github.com/bkabrda/rewheel/pull/1 + +* Sun Aug 17 2014 Fedora Release Engineering - 3.4.1-13 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_21_22_Mass_Rebuild + +* Sun Jun 8 2014 Peter Robinson 3.4.1-12 +- aarch64 has valgrind, just list those that don't support it + +* Sun Jun 08 2014 Fedora Release Engineering - 3.4.1-11 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_21_Mass_Rebuild + +* Wed Jun 04 2014 Karsten Hopp 3.4.1-10 +- bump release and rebuild to link with the correct tcl/tk libs on ppcle + +* Tue Jun 03 2014 Matej Stuchlik - 3.4.1-9 +- Change paths to bundled projects in rewheel patch + +* Fri May 30 2014 Miro Hrončok - 3.4.1-8 +- In config script, use uname -m to write the arch + +* Thu May 29 2014 Dan Horák - 3.4.1-7 +- update the arch list where valgrind exists - %%power64 includes also + ppc64le which is not supported yet + +* Thu May 29 2014 Miro Hrončok - 3.4.1-6 +- Forward arguments to the arch specific config script +Resolves: rhbz#1102683 + +* Wed May 28 2014 Miro Hrončok - 3.4.1-5 +- Rename python3.Xm-config script to arch specific. +Resolves: rhbz#1091815 + +* Tue May 27 2014 Bohuslav Kabrda - 3.4.1-4 +- Use python3-*, not python-* runtime requires on setuptools and pip +- rebuild for tcl-8.6 + +* Tue May 27 2014 Matej Stuchlik - 3.4.1-3 +- Update the rewheel module + +* Mon May 26 2014 Miro Hrončok - 3.4.1-2 +- Fix multilib dependencies. 
+Resolves: rhbz#1091815 + +* Sun May 25 2014 Matej Stuchlik - 3.4.1-1 +- Update to Python 3.4.1 + +* Sun May 25 2014 Matej Stuchlik - 3.4.0-8 +- Fix test_gdb failure on ppc64le +Resolves: rhbz#1095355 + +* Thu May 22 2014 Miro Hrončok - 3.4.0-7 +- Add macro %%python3_version_nodots + +* Sun May 18 2014 Matej Stuchlik - 3.4.0-6 +- Disable test_faulthandler, test_gdb on aarch64 +Resolves: rhbz#1045193 + +* Fri May 16 2014 Matej Stuchlik - 3.4.0-5 +- Don't add Werror=declaration-after-statement for extension + modules through setup.py (PyBT#21121) + +* Mon May 12 2014 Matej Stuchlik - 3.4.0-4 +- Add setuptools and pip to Requires + +* Tue Apr 29 2014 Matej Stuchlik - 3.4.0-3 +- Point __os_install_post to correct brp-* files + +* Tue Apr 15 2014 Matej Stuchlik - 3.4.0-2 +- Temporarily disable tests requiring SIGHUP (rhbz#1088233) + +* Tue Apr 15 2014 Matej Stuchlik - 3.4.0-1 +- Update to Python 3.4 final +- Add patch adding the rewheel module +- Merge patches from master + +* Wed Jan 08 2014 Bohuslav Kabrda - 3.4.0-0.1.b2 +- Update to Python 3.4 beta 2. +- Refreshed patches: 55 (systemtap), 146 (hashlib-fips), 154 (test_gdb noise) +- Dropped patches: 114 (statvfs constants), 177 (platform unicode) + +* Mon Nov 25 2013 Bohuslav Kabrda - 3.4.0-0.1.b1 +- Update to Python 3.4 beta 1. +- Refreshed patches: 102 (lib64), 111 (no static lib), 125 (less verbose COUNT +ALLOCS), 141 (fix COUNT_ALLOCS in test_module), 146 (hashlib fips), +157 (UID+GID overflows), 173 (ENOPROTOOPT in bind_port) +- Removed patch 00187 (remove pthread atfork; upstreamed) + +* Mon Nov 04 2013 Bohuslav Kabrda - 3.4.0-0.1.a4 +- Update to Python 3.4 alpha 4. +- Refreshed patches: 55 (systemtap), 102 (lib64), 111 (no static lib), +114 (statvfs flags), 132 (unittest rpmbuild hooks), 134 (fix COUNT_ALLOCS in +test_sys), 143 (tsc on ppc64), 146 (hashlib fips), 153 (test gdb noise), +157 (UID+GID overflows), 173 (ENOPROTOOPT in bind_port), 186 (dont raise +from py_compile) +- Removed patches: 129 (test_subprocess nonreadable dir - no longer fails in +Koji), 142 (the mock issue that caused this is fixed) +- Added patch 187 (remove thread atfork) - will be in next version +- Refreshed script for checking pyc and pyo timestamps with new ignored files. +- The fips patch is disabled for now until upstream makes a final decision +what to do with sha3 implementation for 3.4.0. + +* Wed Oct 30 2013 Bohuslav Kabrda - 3.3.2-7 +- Bytecompile all *.py files properly during build (rhbz#1023607) + +* Fri Aug 23 2013 Matej Stuchlik - 3.3.2-6 +- Added fix for CVE-2013-4238 (rhbz#996399) + +* Fri Jul 26 2013 Dennis Gilmore - 3.3.2-5 +- fix up indentation in arm patch + +* Fri Jul 26 2013 Dennis Gilmore - 3.3.2-4 +- disable a test that fails on arm +- enable valgrind support on arm arches + +* Tue Jul 02 2013 Bohuslav Kabrda - 3.3.2-3 +- Fix build with libffi containing multilib wrapper for ffi.h (rhbz#979696). + +* Mon May 20 2013 Bohuslav Kabrda - 3.3.2-2 +- Add patch for CVE-2013-2099 (rhbz#963261). + +* Thu May 16 2013 Bohuslav Kabrda - 3.3.2-1 +- Updated to Python 3.3.2. +- Refreshed patches: 153 (gdb test noise) +- Dropped patches: 175 (configure -Wformat, fixed upstream), 182 (gdb +test threads) +- Synced patch numbers with python.spec. 
+ +* Thu May 9 2013 David Malcolm - 3.3.1-4 +- fix test.test_gdb.PyBtTests.test_threads on ppc64 (patch 181; rhbz#960010) + +* Thu May 02 2013 Bohuslav Kabrda - 3.3.1-3 +- Add patch that enables building on ppc64p7 (replace the sed, so that +we get consistent with python2 spec and it's more obvious that we're doing it. + +* Wed Apr 24 2013 Bohuslav Kabrda - 3.3.1-2 +- Add fix for gdb tests failing on arm, rhbz#951802. + +* Tue Apr 09 2013 Bohuslav Kabrda - 3.3.1-1 +- Updated to Python 3.3.1. +- Refreshed patches: 55 (systemtap), 111 (no static lib), 146 (hashlib fips), +153 (fix test_gdb noise), 157 (uid, gid overflow - fixed upstream, just +keeping few more downstream tests) +- Removed patches: 3 (audiotest.au made it to upstream tarball) +- Removed workaround for http://bugs.python.org/issue14774, discussed in +http://bugs.python.org/issue15298 and fixed in revision 24d52d3060e8. + +* Mon Mar 25 2013 David Malcolm - 3.3.0-10 +- fix gcc 4.8 incompatibility (rhbz#927358); regenerate autotool intermediates + +* Mon Mar 25 2013 David Malcolm - 3.3.0-9 +- renumber patches to keep them in sync with python.spec + +* Fri Mar 15 2013 Toshio Kuratomi - 3.3.0-8 +- Fix error in platform.platform() when non-ascii byte strings are decoded to + unicode (rhbz#922149) + +* Thu Mar 14 2013 Toshio Kuratomi - 3.3.0-7 +- Fix up shared library extension (rhbz#889784) + +* Thu Mar 07 2013 Karsten Hopp 3.3.0-6 +- add ppc64p7 build target, optimized for Power7 + +* Mon Mar 4 2013 David Malcolm - 3.3.0-5 +- add workaround for ENOPROTOOPT seen running selftests in Koji +(rhbz#913732) + +* Mon Mar 4 2013 David Malcolm - 3.3.0-4 +- remove config flag from /etc/rpm/macros.{python3|pybytecompile} + +* Mon Feb 11 2013 David Malcolm - 3.3.0-3 +- add aarch64 (rhbz#909783) + +* Thu Nov 29 2012 David Malcolm - 3.3.0-2 +- add BR on bluez-libs-devel (rhbz#879720) + +* Sat Sep 29 2012 David Malcolm - 3.3.0-1 +- 3.3.0rc3 -> 3.3.0; drop alphatag + +* Mon Sep 24 2012 David Malcolm - 3.3.0-0.6.rc3 +- 3.3.0rc2 -> 3.3.0rc3 + +* Mon Sep 10 2012 David Malcolm - 3.3.0-0.5.rc2 +- 3.3.0rc1 -> 3.3.0rc2; refresh patch 55 + +* Mon Aug 27 2012 David Malcolm - 3.3.0-0.4.rc1 +- 3.3.0b2 -> 3.3.0rc1; refresh patches 3, 55 + +* Mon Aug 13 2012 David Malcolm - 3.3.0-0.3.b2 +- 3.3b1 -> 3.3b2; drop upstreamed patch 152; refresh patches 3, 102, 111, +134, 153, 160; regenenerate autotools patch; rework systemtap patch to work +correctly when LANG=C (patch 55); importlib.test was moved to +test.test_importlib upstream + +* Mon Aug 13 2012 Karsten Hopp 3.3.0-0.2.b1 +- disable some failing checks on PPC* (rhbz#846849) + +* Fri Aug 3 2012 David Malcolm - 3.3.0-0.1.b1 +- 3.2 -> 3.3: https://fedoraproject.org/wiki/Features/Python_3.3 +- 3.3.0b1: refresh patches 3, 55, 102, 111, 113, 114, 134, 157; drop upstream +patch 147; regenenerate autotools patch; drop "--with-wide-unicode" from +configure (PEP 393); "plat-linux2" -> "plat-linux" (upstream issue 12326); +"bz2" -> "_bz2" and "crypt" -> "_crypt"; egg-info files are no longer shipped +for stdlib (upstream issues 10645 and 12218); email/test moved to +test/test_email; add /usr/bin/pyvenv[-3.3] and venv module (PEP 405); add +_decimal and _lzma modules; make collections modules explicit in payload again +(upstream issue 11085); add _testbuffer module to tests subpackage (added in +upstream commit 3f9b3b6f7ff0); fix test failures (patches 160 and 161); +workaround erroneously shared _sysconfigdata.py upstream issue #14774; fix +distutils.sysconfig traceback (patch 162); add BuildRequires: xz-devel (for 
+_lzma module); skip some tests within test_socket (patch 163) + +* Sat Jul 21 2012 Fedora Release Engineering - 3.2.3-11 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_18_Mass_Rebuild + +* Fri Jul 20 2012 David Malcolm - 3.3.0-0.1.b1 + +* Fri Jun 22 2012 David Malcolm - 3.2.3-10 +- use macro for power64 (rhbz#834653) + +* Mon Jun 18 2012 David Malcolm - 3.2.3-9 +- fix missing include in uid/gid handling patch (patch 157; rhbz#830405) + +* Wed May 30 2012 Bohuslav Kabrda - 3.2.3-8 +- fix tapset for debug build + +* Tue May 15 2012 David Malcolm - 3.2.3-7 +- update uid/gid handling to avoid int overflows seen with uid/gid +values >= 2^31 on 32-bit architectures (patch 157; rhbz#697470) + +* Fri May 4 2012 David Malcolm - 3.2.3-6 +- renumber autotools patch from 300 to 5000 +- specfile cleanups + +* Mon Apr 30 2012 David Malcolm - 3.2.3-5 +- fix test_gdb.py (patch 156; rhbz#817072) + +* Fri Apr 20 2012 David Malcolm - 3.2.3-4 +- avoid allocating thunks in ctypes unless absolutely necessary, to avoid +generating SELinux denials on "import ctypes" and "import uuid" when embedding +Python within httpd (patch 155; rhbz#814391) + +* Fri Apr 20 2012 David Malcolm - 3.2.3-3 +- add explicit version requirements on expat to avoid linkage problems with +XML_SetHashSalt + +* Thu Apr 12 2012 David Malcolm - 3.2.3-2 +- fix test_gdb (patch 153) + +* Wed Apr 11 2012 David Malcolm - 3.2.3-1 +- 3.2.3; refresh patch 102 (lib64); drop upstream patches 148 (gdbm magic +values), 149 (__pycache__ fix); add patch 152 (test_gdb regex) + +* Thu Feb 9 2012 Thomas Spura - 3.2.2-13 +- use newly installed python for byte compiling (now for real) + +* Sun Feb 5 2012 Thomas Spura - 3.2.2-12 +- use newly installed python for byte compiling (#787498) + +* Wed Jan 4 2012 Ville Skyttä - 3.2.2-11 +- Build with $RPM_LD_FLAGS (#756863). +- Use xz-compressed source tarball. 
+ +* Wed Dec 07 2011 Karsten Hopp 3.2.2-10 +- disable rAssertAlmostEqual in test_cmath on PPC (#750811) + +* Mon Oct 17 2011 Rex Dieter - 3.2.2-9 +- python3-devel missing autogenerated pkgconfig() provides (#746751) + +* Mon Oct 10 2011 David Malcolm - 3.2.2-8 +- cherrypick fix for distutils not using __pycache__ when byte-compiling +files (rhbz#722578) + +* Fri Sep 30 2011 David Malcolm - 3.2.2-7 +- re-enable gdbm (patch 148; rhbz#742242) + +* Fri Sep 16 2011 David Malcolm - 3.2.2-6 +- add a sys._debugmallocstats() function (patch 147) + +* Wed Sep 14 2011 David Malcolm - 3.2.2-5 +- support OpenSSL FIPS mode in _hashlib and hashlib; don't build the _md5 and +_sha* modules, relying on _hashlib in hashlib (rhbz#563986; patch 146) + +* Tue Sep 13 2011 David Malcolm - 3.2.2-4 +- disable gdbm module to prepare for gdbm soname bump + +* Mon Sep 12 2011 David Malcolm - 3.2.2-3 +- renumber and rename patches for consistency with python.spec (8 to 55, 106 +to 104, 6 to 111, 104 to 113, 105 to 114, 125, 131, 130 to 143) + +* Sat Sep 10 2011 David Malcolm - 3.2.2-2 +- rewrite of "check", introducing downstream-only hooks for skipping specific +cases in an rpmbuild (patch 132), and fixing/skipping failing tests in a more +fine-grained manner than before; (patches 106, 133-142 sparsely, moving +patches for consistency with python.spec: 128 to 134, 126 to 135, 127 to 141) + +* Tue Sep 6 2011 David Malcolm - 3.2.2-1 +- 3.2.2 + +* Thu Sep 1 2011 David Malcolm - 3.2.1-7 +- run selftests with "--verbose" +- disable parts of test_io on ppc (rhbz#732998) + +* Wed Aug 31 2011 David Malcolm - 3.2.1-6 +- use "--findleaks --verbose3" when running test suite + +* Tue Aug 23 2011 David Malcolm - 3.2.1-5 +- re-enable and fix the --with-tsc option on ppc64, and rework it on 32-bit +ppc to avoid aliasing violations (patch 130; rhbz#698726) + +* Tue Aug 23 2011 David Malcolm - 3.2.1-4 +- don't use --with-tsc on ppc64 debug builds (rhbz#698726) + +* Thu Aug 18 2011 David Malcolm - 3.2.1-3 +- add %%python3_version to the rpm macros (rhbz#719082) + +* Mon Jul 11 2011 Dennis Gilmore - 3.2.1-2 +- disable some tests on sparc arches + +* Mon Jul 11 2011 David Malcolm - 3.2.1-1 +- 3.2.1; refresh lib64 patch (102), subprocess unit test patch (129), disabling +of static library build (due to Modules/_testembed; patch 6), autotool +intermediates (patch 300) + +* Fri Jul 8 2011 David Malcolm - 3.2-5 +- use the gdb hooks from the upstream tarball, rather than keeping our own copy + +* Fri Jul 8 2011 David Malcolm - 3.2-4 +- don't run test_openpty and test_pty in %%check + +* Fri Jul 8 2011 David Malcolm - 3.2-3 +- cleanup of BuildRequires; add comment headings to specfile sections + +* Tue Apr 19 2011 David Malcolm - 3.2-2 +- fix the libpython.stp systemtap tapset (rhbz#697730) + +* Mon Feb 21 2011 David Malcolm - 3.2-1 +- 3.2 +- drop alphatag +- regenerate autotool patch + +* Mon Feb 14 2011 David Malcolm - 3.2-0.13.rc3 +- add a /usr/bin/python3-debug symlink within the debug subpackage + +* Mon Feb 14 2011 David Malcolm - 3.2-0.12.rc3 +- 3.2rc3 +- regenerate autotool patch + +* Wed Feb 09 2011 Fedora Release Engineering - 3.2-0.11.rc2 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_15_Mass_Rebuild + +* Mon Jan 31 2011 David Malcolm - 3.2-0.10.rc2 +- 3.2rc2 + +* Mon Jan 17 2011 David Malcolm - 3.2-0.9.rc1 +- 3.2rc1 +- rework patch 6 (static lib removal) +- remove upstreamed patch 130 (ppc debug build) +- regenerate patch 300 (autotool intermediates) +- updated packaging to reflect upstream rewrite of "Demo" (issue 7962) +- 
added libpython3.so and 2to3-3.2 + +* Wed Jan 5 2011 David Malcolm - 3.2-0.8.b2 +- set EXTRA_CFLAGS to our CFLAGS, rather than overriding OPT, fixing a linker +error with dynamic annotations (when configured using --with-valgrind) +- fix the ppc build of the debug configuration (patch 130; rhbz#661510) + +* Tue Jan 4 2011 David Malcolm - 3.2-0.7.b2 +- add --with-valgrind to configuration (on architectures that support this) + +* Wed Dec 29 2010 David Malcolm - 3.2-0.6.b2 +- work around test_subprocess failure seen in koji (patch 129) + +* Tue Dec 28 2010 David Malcolm - 3.2-0.5.b2 +- 3.2b2 +- rework patch 3 (removal of mimeaudio tests), patch 6 (no static libs), +patch 8 (systemtap), patch 102 (lib64) +- remove patch 4 (rendered redundant by upstream r85537), patch 103 (PEP 3149), +patch 110 (upstreamed expat fix), patch 111 (parallel build fix for grammar +fixed upstream) +- regenerate patch 300 (autotool intermediates) +- workaround COUNT_ALLOCS weakref issues in test suite (patch 126, patch 127, +patch 128) +- stop using runtest.sh in %%check (dropped by upstream), replacing with +regrtest; fixup list of failing tests +- introduce "pyshortver", "SOABI_optimized" and "SOABI_debug" macros +- rework manifests of shared libraries to use "SOABI_" macros, reflecting +PEP 3149 +- drop itertools, operator and _collections modules from the manifests as py3k +commit r84058 moved these inside libpython; json/tests moved to test/json_tests +- move turtle code into the tkinter subpackage + +* Wed Nov 17 2010 David Malcolm - 3.2-0.5.a1 +- fix sysconfig to not rely on the -devel subpackage (rhbz#653058) + +* Thu Sep 9 2010 David Malcolm - 3.2-0.4.a1 +- move most of the content of the core package to the libs subpackage, given +that the libs aren't meaningfully usable without the standard libraries + +* Wed Sep 8 2010 David Malcolm - 3.2-0.3.a1 +- Move test.support to core package (rhbz#596258) +- Add various missing __pycache__ directories to payload + +* Sun Aug 22 2010 Toshio Kuratomi - 3.2-0.2.a1 +- Add __pycache__ directory for site-packages + +* Sun Aug 22 2010 Thomas Spura - 3.2-0.1.a1 +- on 64bit "stdlib" was still "/usr/lib/python*" (modify *lib64.patch) +- make find-provides-without-python-sonames.sh 64bit aware + +* Sat Aug 21 2010 David Malcolm - 3.2-0.0.a1 +- 3.2a1; add alphatag +- rework %%files in the light of PEP 3147 (__pycache__) +- drop our configuration patch to Setup.dist (patch 0): setup.py should do a +better job of things, and the %%files explicitly lists our modules (r82746 +appears to break the old way of doing things). This leads to various modules +changing from "foomodule.so" to "foo.so". It also leads to the optimized build +dropping the _sha1, _sha256 and _sha512 modules, but these are provided by +_hashlib; _weakref becomes a builtin module; xxsubtype goes away (it's only for +testing/devel purposes) +- fixup patches 3, 4, 6, 8, 102, 103, 105, 111 for the rebase +- remove upstream patches: 7 (system expat), 106, 107, 108 (audioop reformat +plus CVE-2010-1634 and CVE-2010-2089), 109 (CVE-2008-5983) +- add machinery for rebuilding "configure" and friends, using the correct +version of autoconf (patch 300) +- patch the debug build's usage of COUNT_ALLOCS to be less verbose (patch 125) +- "modulator" was removed upstream +- drop "-b" from patch applications affecting .py files to avoid littering the +installation tree + +* Thu Aug 19 2010 Toshio Kuratomi - 3.1.2-13 +- Turn on computed-gotos. 
+- Fix for parallel make and graminit.c
+
+* Fri Jul 2 2010 David Malcolm - 3.1.2-12
+- rebuild
+
+* Fri Jul 2 2010 David Malcolm - 3.1.2-11
+- Fix an incompatibility between pyexpat and the system expat-2.0.1 that led to
+a segfault running test_pyexpat.py (patch 110; upstream issue 9054; rhbz#610312)
+
+* Fri Jun 4 2010 David Malcolm - 3.1.2-10
+- ensure that the compiler is invoked with "-fwrapv" (rhbz#594819)
+- reformat whitespace in audioop.c (patch 106)
+- CVE-2010-1634: fix various integer overflow checks in the audioop
+module (patch 107)
+- CVE-2010-2089: further checks within the audioop module (patch 108)
+- CVE-2008-5983: the new PySys_SetArgvEx entry point from r81399 (patch 109)
+
+* Thu May 27 2010 Dan Horák - 3.1.2-9
+- reading the timestamp counter is available only on some arches (see Python/ceval.c)
+
+* Wed May 26 2010 David Malcolm - 3.1.2-8
+- add flags for statvfs.f_flag to the constant list in posixmodule (i.e. "os")
+(patch 105)
+
+* Tue May 25 2010 David Malcolm - 3.1.2-7
+- add configure-time support for COUNT_ALLOCS and CALL_PROFILE debug options
+(patch 104); enable them and the WITH_TSC option within the debug build
+
+* Mon May 24 2010 David Malcolm - 3.1.2-6
+- build and install two different configurations of Python 3: debug and
+standard, packaging the debug build in a new "python3-debug" subpackage
+(patch 103)
+
+* Tue Apr 13 2010 David Malcolm - 3.1.2-5
+- exclude test_http_cookies when running selftests, due to hang seen on
+http://koji.fedoraproject.org/koji/taskinfo?taskID=2088463 (cancelled after
+11 hours)
+- update python-gdb.py from v5 to py3k version submitted upstream
+
+* Wed Mar 31 2010 David Malcolm - 3.1.2-4
+- update python-gdb.py from v4 to v5 (improving performance and stability,
+adding commands)
+
+* Thu Mar 25 2010 David Malcolm - 3.1.2-3
+- update python-gdb.py from v3 to v4 (fixing infinite recursion on reference
+cycles and tracebacks on bytes 0x80-0xff in strings, adding handlers for sets
+and exceptions)
+
+* Wed Mar 24 2010 David Malcolm - 3.1.2-2
+- refresh gdb hooks to v3 (reworking how they are packaged)
+
+* Sun Mar 21 2010 David Malcolm - 3.1.2-1
+- update to 3.1.2: http://www.python.org/download/releases/3.1.2/
+- drop upstreamed patch 2 (.pyc permissions handling)
+- drop upstream patch 5 (fix for the test_tk and test_ttk_* selftests)
+- drop upstreamed patch 200 (path-fixing script)
+
+* Sat Mar 20 2010 David Malcolm - 3.1.1-28
+- fix typo in libpython.stp (rhbz:575336)
+
+* Fri Mar 12 2010 David Malcolm - 3.1.1-27
+- add pyfuntop.stp example (source 7)
+- convert usage of $$RPM_BUILD_ROOT to %%{buildroot} throughout, for
+consistency with python.spec
+
+* Mon Feb 15 2010 Thomas Spura - 3.1.1-26
+- rebuild for new package of redhat-rpm-config (rhbz:564527)
+- use 'install -p' when running 'make install'
+
+* Fri Feb 12 2010 David Malcolm - 3.1.1-25
+- split configure options into multiple lines for ease of editing
+- add systemtap static markers (wcohen, mjw, dmalcolm; patch 8), a systemtap
+tapset defining "python.function.entry" and "python.function.return" to make
+the markers easy to use (dmalcolm; source 5), and an example of using the
+tapset to the docs (dmalcolm; source 6) (rhbz:545179)
+
+* Mon Feb 8 2010 David Malcolm - 3.1.1-24
+- move the -gdb.py file from %%{_libdir}/INSTSONAME-gdb.py to
+%%{_prefix}/lib/debug/%%{_libdir}/INSTSONAME.debug-gdb.py to avoid noise from
+ldconfig (bug 562980), which should also ensure it becomes part of the
+debuginfo subpackage, rather than the libs subpackage
+- introduce %%{py_SOVERSION} and %%{py_INSTSONAME} to reflect the upstream
+configure script, and to avoid fragile scripts that try to figure this out
+dynamically (e.g. for the -gdb.py change)
+
+* Mon Feb 8 2010 David Malcolm - 3.1.1-23
+- add gdb hooks for easier debugging (Source 4)
+
+* Thu Jan 28 2010 David Malcolm - 3.1.1-22
+- update python-3.1.1-config.patch to remove downstream customization of build
+of pyexpat and elementtree modules
+- add patch adapted from upstream (patch 7) to add support for building against
+system expat; add --with-system-expat to "configure" invocation
+- remove embedded copies of expat and zlib from source tree during "prep"
+
+* Mon Jan 25 2010 David Malcolm - 3.1.1-21
+- introduce %%{dynload_dir} macro
+- explicitly list all lib-dynload files, rather than dynamically gathering the
+payload into a temporary text file, so that we can be sure what we are
+shipping
+- introduce a macros.pybytecompile source file, to help with packaging python3
+modules (Source3; written by Toshio)
+- rename "2to3-3" to "python3-2to3" to better reflect python 3 module packaging
+plans
+
+* Mon Jan 25 2010 David Malcolm - 3.1.1-20
+- change python-3.1.1-config.patch to remove our downstream change to curses
+configuration in Modules/Setup.dist, so that the curses modules are built using
+setup.py with the downstream default (linking against libncursesw.so, rather
+than libncurses.so), rather than within the Makefile; add a test to %%install
+to verify that the curses module is linked against the correct
+DSO (bug 539917; changes _cursesmodule.so -> _curses.so)
+
+* Fri Jan 22 2010 David Malcolm - 3.1.1-19
+- add %%py3dir macro to macros.python3 (to be used during unified python 2/3
+builds for setting up the python3 copy of the source tree)
+
+* Wed Jan 20 2010 David Malcolm - 3.1.1-18
+- move lib2to3 from -tools subpackage to main package (bug 556667)
+
+* Sun Jan 17 2010 David Malcolm - 3.1.1-17
+- patch Makefile.pre.in to avoid building static library (patch 6, bug 556092)
+
+* Fri Jan 15 2010 David Malcolm - 3.1.1-16
+- use the %%{_isa} macro to ensure that the python-devel dependency on python
+is for the correct multilib arch (#555943)
+- delete bundled copy of libffi to make sure we use the system one
+
+* Fri Jan 15 2010 David Malcolm - 3.1.1-15
+- fix the URLs output by pydoc so they point at python.org's 3.1 build of the
+docs, rather than the 2.6 build
+
+* Wed Jan 13 2010 David Malcolm - 3.1.1-14
+- replace references to /usr with %%{_prefix}; replace references to
+/usr/include with %%{_includedir} (Toshio)
+
+* Mon Jan 11 2010 David Malcolm - 3.1.1-13
+- fix permission on find-provides-without-python-sonames.sh from 775 to 755
+
+* Mon Jan 11 2010 David Malcolm - 3.1.1-12
+- remove build-time requirements on tix and tk, since we already have
+build-time requirements on the -devel subpackages for each of these (Thomas
+Spura)
+- replace usage of %%define with %%global (Thomas Spura)
+- remove forcing of CC=gcc as this old workaround for bug 109268 appears to no
+longer be necessary
+- move various test files from the "tools"/"tkinter" subpackages to the "test"
+subpackage
+
+* Thu Jan 7 2010 David Malcolm - 3.1.1-11
+- add %%check section (thanks to Thomas Spura)
+- update patch 4 to use correct shebang line
+- get rid of stray patch file from buildroot
+
+* Tue Nov 17 2009 Andrew McNabb - 3.1.1-10
+- switched a few instances of "find |xargs" to "find -exec" for consistency
+- made the description of __os_install_post more accurate
+
+* Wed Nov 4 2009 David Malcolm - 3.1.1-9
+- add macros.python3 to the -devel subpackage, containing common macros for use
+when packaging python3 modules
+
+* Tue Nov 3 2009 David Malcolm - 3.1.1-8
+- add a provides of "python(abi)" (see bug 532118)
+- fix issues identified by a.badger in package review (bug 526126, comment 39):
+  - use "3" throughout metadata, rather than "3.*"
+  - remove conditional around "pkg-config openssl"
+  - use standard cleanup of RPM_BUILD_ROOT
+  - replace hardcoded references to /usr with _prefix macro
+  - stop removing egg-info files
+  - use /usr/bin/python3.1 rather than /usr/bin/env python3.1 when fixing
+up shebang lines
+  - stop attempting to remove no-longer-present .cvsignore files
+  - move the post/postun sections above the "files" sections
+
+* Thu Oct 29 2009 David Malcolm - 3.1.1-7
+- remove commented-away patch 51 (python-2.6-distutils_rpm.patch): the -O1
+flag is used by default in the upstream code
+- "Makefile" and the config-32/64.h file are needed by distutils/sysconfig.py
+_init_posix(), so we include them in the core package, along with their parent
+directories (bug 531901)
+
+* Tue Oct 27 2009 David Malcolm - 3.1.1-6
+- reword description, based on suggestion by amcnabb
+- fix the test_email and test_imp selftests (patch 3 and patch 4 respectively)
+- fix the test_tk and test_ttk_* selftests (patch 5)
+- fix up the specfile's handling of shebang/perms to avoid corrupting
+test_httpservers.py (sed command suggested by amcnabb)
+
+* Thu Oct 22 2009 David Malcolm - 3.1.1-5
+- fixup importlib/_bootstrap.py so that it correctly handles being unable to
+open .pyc files for writing (patch 2, upstream issue 7187)
+- actually apply the rpath patch (patch 1)
+
+* Thu Oct 22 2009 David Malcolm - 3.1.1-4
+- update patch0's setup of the crypt module to link it against libcrypt
+- update patch0 to comment "datetimemodule" back out, so that it is built
+using setup.py (see Setup, option 3), thus linking it statically against
+timemodule.c and thus avoiding a run-time "undefined symbol:
+_PyTime_DoubleToTimet" failure on "import datetime"
+
+* Wed Oct 21 2009 David Malcolm - 3.1.1-3
+- remove executable flag from various files that shouldn't have it
+- fix end-of-line encodings
+- fix a character encoding
+
+* Tue Oct 20 2009 David Malcolm - 3.1.1-2
+- disable invocation of brp-python-bytecompile in postprocessing, since
+it would be run with the wrong version of python (adapted from ivazquez'
+python3000 specfile)
+- use a custom implementation of __find_provides in order to filter out bogus
+provides lines for the various .so modules
+- fixup distutils/unixccompiler.py to remove standard library path from rpath
+(patch 1, was Patch0 in ivazquez' python3000 specfile)
+- split out libraries into a -libs subpackage
+- update summaries and descriptions, basing content on ivazquez' specfile
+- fixup executable permissions on .py, .xpm and .xbm files, based on work in
+ivazquez's specfile
+- get rid of DOS batch files
+- fixup permissions for shared libraries from non-standard 555 to standard 755
+- move /usr/bin/python*-config to the -devel subpackage
+- mark various directories as being documentation
+
+* Thu Sep 24 2009 Andrew McNabb 3.1.1-1
+- Initial package for Python 3.
+