python3 package update to version 3.6.5
Signed-off-by: basebuilder_pel7ppc64bebuilder0 <basebuilder@powerel.org>master
parent
aa26350765
commit
a6d02a533b
|
@ -0,0 +1,822 @@
|
|||
diff -up Python-3.3.0rc2/configure.ac.systemtap Python-3.3.0rc2/configure.ac
|
||||
--- Python-3.3.0rc2/configure.ac.systemtap 2012-09-09 05:11:14.000000000 -0400
|
||||
+++ Python-3.3.0rc2/configure.ac 2012-09-10 09:17:21.114511781 -0400
|
||||
@@ -2678,6 +2678,23 @@ if test "$with_valgrind" != no; then
|
||||
OPT="-DDYNAMIC_ANNOTATIONS_ENABLED=1 $OPT"
|
||||
fi
|
||||
|
||||
+# Check for systemtap support
|
||||
+# On Linux, /usr/bin/dtrace is in fact a shim to SystemTap
|
||||
+AC_MSG_CHECKING([for --with-systemtap])
|
||||
+AC_ARG_WITH([systemtap],
|
||||
+ AC_HELP_STRING([--with(out)-systemtap], [disable/enable SystemTap support]),,
|
||||
+ with_systemtap=no)
|
||||
+AC_MSG_RESULT([$with_systemtap])
|
||||
+if test "$with_systemtap" != no; then
|
||||
+ AC_DEFINE(WITH_SYSTEMTAP, 1,
|
||||
+ [Define if you want to compile in SystemTap support])
|
||||
+ SYSTEMTAPOBJS="Python/pysystemtap.o"
|
||||
+ SYSTEMTAPDEPS="\$(srcdir)/Python/pysystemtap.h"
|
||||
+fi
|
||||
+
|
||||
+AC_SUBST(SYSTEMTAPOBJS)
|
||||
+AC_SUBST(SYSTEMTAPDEPS)
|
||||
+
|
||||
# -I${DLINCLDIR} is added to the compile rule for importdl.o
|
||||
AC_SUBST(DLINCLDIR)
|
||||
DLINCLDIR=.
|
||||
diff -up Python-3.3.0rc2/configure.systemtap Python-3.3.0rc2/configure
|
||||
--- Python-3.3.0rc2/configure.systemtap 2012-09-09 05:11:14.000000000 -0400
|
||||
+++ Python-3.3.0rc2/configure 2012-09-10 09:17:21.116511780 -0400
|
||||
@@ -618,6 +618,8 @@ TRUE
|
||||
MACHDEP_OBJS
|
||||
DYNLOADFILE
|
||||
DLINCLDIR
|
||||
+SYSTEMTAPDEPS
|
||||
+SYSTEMTAPOBJS
|
||||
THREADOBJ
|
||||
LDLAST
|
||||
USE_THREAD_MODULE
|
||||
@@ -779,6 +781,7 @@ with_doc_strings
|
||||
with_tsc
|
||||
with_pymalloc
|
||||
with_valgrind
|
||||
+with_systemtap
|
||||
with_fpectl
|
||||
with_libm
|
||||
with_libc
|
||||
@@ -1456,6 +1459,7 @@ Optional Packages:
|
||||
--with(out)-tsc enable/disable timestamp counter profile
|
||||
--with(out)-pymalloc disable/enable specialized mallocs
|
||||
--with-valgrind Enable Valgrind support
|
||||
+ --with(out)-systemtap disable/enable SystemTap support
|
||||
--with-fpectl enable SIGFPE catching
|
||||
--with-libm=STRING math library
|
||||
--with-libc=STRING C library
|
||||
@@ -10065,6 +10069,31 @@ fi
|
||||
OPT="-DDYNAMIC_ANNOTATIONS_ENABLED=1 $OPT"
|
||||
fi
|
||||
|
||||
+# Check for systemtap support
|
||||
+# On Linux, /usr/bin/dtrace is in fact a shim to SystemTap
|
||||
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-systemtap" >&5
|
||||
+$as_echo_n "checking for --with-systemtap... " >&6; }
|
||||
+
|
||||
+# Check whether --with-systemtap was given.
|
||||
+if test "${with_systemtap+set}" = set; then :
|
||||
+ withval=$with_systemtap;
|
||||
+else
|
||||
+ with_systemtap=no
|
||||
+fi
|
||||
+
|
||||
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_systemtap" >&5
|
||||
+$as_echo "$with_systemtap" >&6; }
|
||||
+if test "$with_systemtap" != no; then
|
||||
+
|
||||
+$as_echo "#define WITH_SYSTEMTAP 1" >>confdefs.h
|
||||
+
|
||||
+ SYSTEMTAPOBJS="Python/pysystemtap.o"
|
||||
+ SYSTEMTAPDEPS="\$(srcdir)/Python/pysystemtap.h"
|
||||
+fi
|
||||
+
|
||||
+
|
||||
+
|
||||
+
|
||||
# -I${DLINCLDIR} is added to the compile rule for importdl.o
|
||||
|
||||
DLINCLDIR=.
|
||||
diff -up Python-3.3.0rc2/Doc/howto/index.rst.systemtap Python-3.3.0rc2/Doc/howto/index.rst
|
||||
--- Python-3.3.0rc2/Doc/howto/index.rst.systemtap 2012-09-09 05:10:51.000000000 -0400
|
||||
+++ Python-3.3.0rc2/Doc/howto/index.rst 2012-09-10 09:17:21.117511779 -0400
|
||||
@@ -29,4 +29,5 @@ Currently, the HOWTOs are:
|
||||
argparse.rst
|
||||
ipaddress.rst
|
||||
clinic.rst
|
||||
+ instrumentation.rst
|
||||
|
||||
diff -up Python-3.3.0rc2/Doc/howto/instrumentation.rst.systemtap Python-3.3.0rc2/Doc/howto/instrumentation.rst
|
||||
--- Python-3.3.0rc2/Doc/howto/instrumentation.rst.systemtap 2012-09-10 09:17:21.117511779 -0400
|
||||
+++ Python-3.3.0rc2/Doc/howto/instrumentation.rst 2012-09-10 09:17:21.117511779 -0400
|
||||
@@ -0,0 +1,295 @@
|
||||
+.. _instrumentation:
|
||||
+
|
||||
+====================================
|
||||
+Instrumenting CPython with SystemTap
|
||||
+====================================
|
||||
+
|
||||
+:author: David Malcolm <dmalcolm@redhat.com>
|
||||
+
|
||||
+DTrace and SystemTap are monitoring tools, each providing a way to inspect
|
||||
+what the processes on a computer system are doing. They both use
|
||||
+domain-specific languages allowing a user to write scripts which:
|
||||
+
|
||||
+ - filter which processes are to be observed
|
||||
+ - gather data from the processes of interest
|
||||
+ - generate reports on the data
|
||||
+
|
||||
+As of Python 3.3, CPython can be built with embedded "markers" that can be
|
||||
+observed by a SystemTap script, making it easier to monitor what the CPython
|
||||
+processes on a system are doing.
|
||||
+
|
||||
+.. Potentially this document could be expanded to also cover DTrace markers.
|
||||
+ However, I'm not a DTrace expert.
|
||||
+
|
||||
+.. I'm using ".. code-block:: c" for SystemTap scripts, as "c" is syntactically
|
||||
+ the closest match that Sphinx supports
|
||||
+
|
||||
+
|
||||
+Enabling the static markers
|
||||
+---------------------------
|
||||
+
|
||||
+In order to build CPython with the embedded markers for SystemTap, the
|
||||
+SystemTap development tools must be installed.
|
||||
+
|
||||
+On a Fedora or Red Hat Enterprise Linux machine, this can be done via::
|
||||
+
|
||||
+ yum install systemtap-sdt-devel
|
||||
+
|
||||
+CPython must then be configured `--with-systemtap`::
|
||||
+
|
||||
+ checking for --with-systemtap... yes
|
||||
+
|
||||
+You can verify if the SystemTap static markers are present in the built
|
||||
+binary by seeing if it contains a ".note.stapsdt" section.
|
||||
+
|
||||
+.. code-block:: bash
|
||||
+
|
||||
+ $ eu-readelf -S ./python | grep .note.stapsdt
|
||||
+ [29] .note.stapsdt NOTE 0000000000000000 00308d78 000000b8 0 0 0 4
|
||||
+
|
||||
+If you've built python as a shared library (with --enable-shared), you need
|
||||
+to look instead within the shared library. For example:
|
||||
+
|
||||
+.. code-block:: bash
|
||||
+
|
||||
+ $ eu-readelf -S libpython3.3dm.so.1.0 | grep .note.stapsdt
|
||||
+ [28] .note.stapsdt NOTE 0000000000000000 00365b68 000000b8 0 0 0 4
|
||||
+
|
||||
+Earlier versions of SystemTap stored the markers in a ".probes" section.
|
||||
+
|
||||
+For the curious, you can see the metadata for the static markers using this
|
||||
+invocation.
|
||||
+
|
||||
+.. code-block:: bash
|
||||
+
|
||||
+ $ eu-readelf -x .note.stapsdt ./python
|
||||
+
|
||||
+ Hex dump of section [29] '.note.stapsdt', 184 bytes at offset 0x308d78:
|
||||
+ 0x00000000 08000000 45000000 03000000 73746170 ....E.......stap
|
||||
+ 0x00000010 73647400 d4664b00 00000000 4fc36600 sdt..fK.....O.f.
|
||||
+ 0x00000020 00000000 488d9000 00000000 70797468 ....H.......pyth
|
||||
+ 0x00000030 6f6e0066 756e6374 696f6e5f 5f656e74 on.function__ent
|
||||
+ 0x00000040 72790038 40257261 78203840 25726478 ry.8@%rax 8@%rdx
|
||||
+ 0x00000050 202d3440 25656378 00000000 08000000 -4@%ecx........
|
||||
+ 0x00000060 46000000 03000000 73746170 73647400 F.......stapsdt.
|
||||
+ 0x00000070 0d674b00 00000000 4fc36600 00000000 .gK.....O.f.....
|
||||
+ 0x00000080 4a8d9000 00000000 70797468 6f6e0066 J.......python.f
|
||||
+ 0x00000090 756e6374 696f6e5f 5f726574 75726e00 unction__return.
|
||||
+ 0x000000a0 38402572 61782038 40257264 78202d34 8@%rax 8@%rdx -4
|
||||
+ 0x000000b0 40256563 78000000 @%ecx...
|
||||
+
|
||||
+and a sufficiently modern eu-readelf can print the metadata:
|
||||
+
|
||||
+.. code-block:: bash
|
||||
+
|
||||
+ $ eu-readelf -n ./python
|
||||
+
|
||||
+ Note section [ 1] '.note.gnu.build-id' of 36 bytes at offset 0x190:
|
||||
+ Owner Data size Type
|
||||
+ GNU 20 GNU_BUILD_ID
|
||||
+ Build ID: a28f8db1b224530b0d38ad7b82a249cf7c3f18d6
|
||||
+
|
||||
+ Note section [27] '.note.stapsdt' of 184 bytes at offset 0x1ae884:
|
||||
+ Owner Data size Type
|
||||
+ stapsdt 70 Version: 3
|
||||
+ PC: 0xe0d3a, Base: 0x14b150, Semaphore: 0x3ae882
|
||||
+ Provider: python, Name: function__return, Args: '8@%rbx 8@%r13 -4@%eax'
|
||||
+ stapsdt 69 Version: 3
|
||||
+ PC: 0xe0f37, Base: 0x14b150, Semaphore: 0x3ae880
|
||||
+ Provider: python, Name: function__entry, Args: '8@%rbx 8@%r13 -4@%eax'
|
||||
+
|
||||
+The above metadata contains information for SystemTap describing how it can
|
||||
+patch strategically-placed machine code instructions to enable the tracing
|
||||
+hooks used by a SystemTap script.
|
||||
+
|
||||
+
|
||||
+Static markers
|
||||
+--------------
|
||||
+
|
||||
+The low-level way to use the SystemTap integration is to use the static
|
||||
+markers directly. This requires you to explicitly state the binary file
|
||||
+containing them.
|
||||
+
|
||||
+For example, this script can be used to show the call/return hierarchy of a
|
||||
+Python script:
|
||||
+
|
||||
+.. code-block:: c
|
||||
+
|
||||
+ probe process('python').mark("function__entry") {
|
||||
+ filename = user_string($arg1);
|
||||
+ funcname = user_string($arg2);
|
||||
+ lineno = $arg3;
|
||||
+
|
||||
+ printf("%s => %s in %s:%d\\n",
|
||||
+ thread_indent(1), funcname, filename, lineno);
|
||||
+ }
|
||||
+
|
||||
+ probe process('python').mark("function__return") {
|
||||
+ filename = user_string($arg1);
|
||||
+ funcname = user_string($arg2);
|
||||
+ lineno = $arg3;
|
||||
+
|
||||
+ printf("%s <= %s in %s:%d\\n",
|
||||
+ thread_indent(-1), funcname, filename, lineno);
|
||||
+ }
|
||||
+
|
||||
+It can be invoked like this:
|
||||
+
|
||||
+.. code-block:: bash
|
||||
+
|
||||
+ $ stap \
|
||||
+ show-call-hierarchy.stp \
|
||||
+ -c ./python test.py
|
||||
+
|
||||
+The output looks like this::
|
||||
+
|
||||
+ 11408 python(8274): => __contains__ in Lib/_abcoll.py:362
|
||||
+ 11414 python(8274): => __getitem__ in Lib/os.py:425
|
||||
+ 11418 python(8274): => encode in Lib/os.py:490
|
||||
+ 11424 python(8274): <= encode in Lib/os.py:493
|
||||
+ 11428 python(8274): <= __getitem__ in Lib/os.py:426
|
||||
+ 11433 python(8274): <= __contains__ in Lib/_abcoll.py:366
|
||||
+
|
||||
+where the columns are:
|
||||
+
|
||||
+ - time in microseconds since start of script
|
||||
+
|
||||
+ - name of executable
|
||||
+
|
||||
+ - PID of process
|
||||
+
|
||||
+and the remainder indicates the call/return hierarchy as the script executes.
|
||||
+
|
||||
+For a `--enable-shared` build of CPython, the markers are contained within the
|
||||
+libpython shared library, and the probe's dotted path needs to reflect this. For
|
||||
+example, this line from the above example::
|
||||
+
|
||||
+ probe process('python').mark("function__entry") {
|
||||
+
|
||||
+should instead read::
|
||||
+
|
||||
+ probe process('python').library("libpython3.3dm.so.1.0").mark("function__entry") {
|
||||
+
|
||||
+(assuming a debug build of CPython 3.3)
|
||||
+
|
||||
+.. I'm reusing the "c:function" type for markers
|
||||
+
|
||||
+.. c:function:: function__entry(str filename, str funcname, int lineno)
|
||||
+
|
||||
+ This marker indicates that execution of a Python function has begun. It is
|
||||
+ only triggered for pure-python (bytecode) functions.
|
||||
+
|
||||
+ The filename, function name, and line number are provided back to the
|
||||
+ tracing script as positional arguments, which must be accessed using
|
||||
+ `$arg1`, `$arg2`:
|
||||
+
|
||||
+ * `$arg1` : `(const char *)` filename, accessible using `user_string($arg1)`
|
||||
+
|
||||
+ * `$arg2` : `(const char *)` function name, accessible using
|
||||
+ `user_string($arg2)`
|
||||
+
|
||||
+ * `$arg3` : `int` line number
|
||||
+
|
||||
+ * `$arg4` : `(PyFrameObject *)`, the frame being executed
|
||||
+
|
||||
+.. c:function:: function__return(str filename, str funcname, int lineno)
|
||||
+
|
||||
+ This marker is the converse of `function__entry`, and indicates that
|
||||
+ execution of a Python function has ended (either via ``return``, or via an
|
||||
+ exception). It is only triggered for pure-python (bytecode) functions.
|
||||
+
|
||||
+ The arguments are the same as for `function__entry`
|
||||
+
|
||||
+
|
||||
+Tapsets
|
||||
+-------
|
||||
+
|
||||
+The higher-level way to use the SystemTap integration is to use a "tapset":
|
||||
+SystemTap's equivalent of a library, which hides some of the lower-level
|
||||
+details of the static markers.
|
||||
+
|
||||
+Here is a tapset file, based on a non-shared build of CPython:
|
||||
+
|
||||
+.. code-block:: c
|
||||
+
|
||||
+ /*
|
||||
+ Provide a higher-level wrapping around the function__entry and
|
||||
+ function__return markers:
|
||||
+ */
|
||||
+ probe python.function.entry = process("python").mark("function__entry")
|
||||
+ {
|
||||
+ filename = user_string($arg1);
|
||||
+ funcname = user_string($arg2);
|
||||
+ lineno = $arg3;
|
||||
+ frameptr = $arg4
|
||||
+ }
|
||||
+ probe python.function.return = process("python").mark("function__return")
|
||||
+ {
|
||||
+ filename = user_string($arg1);
|
||||
+ funcname = user_string($arg2);
|
||||
+ lineno = $arg3;
|
||||
+ frameptr = $arg4
|
||||
+ }
|
||||
+
|
||||
+If this file is installed in SystemTap's tapset directory (e.g.
|
||||
+`/usr/share/systemtap/tapset`), then these additional probepoints become
|
||||
+available:
|
||||
+
|
||||
+.. c:function:: python.function.entry(str filename, str funcname, int lineno, frameptr)
|
||||
+
|
||||
+ This probe point indicates that execution of a Python function has begun.
|
||||
+ It is only triggered for pure-python (bytecode) functions.
|
||||
+
|
||||
+.. c:function:: python.function.return(str filename, str funcname, int lineno, frameptr)
|
||||
+
|
||||
+ This probe point is the converse of `python.function.return`, and indicates
|
||||
+ that execution of a Python function has ended (either via ``return``, or
|
||||
+ via an exception). It is only triggered for pure-python (bytecode) functions.
|
||||
+
|
||||
+
|
||||
+Examples
|
||||
+--------
|
||||
+This SystemTap script uses the tapset above to more cleanly implement the
|
||||
+example given above of tracing the Python function-call hierarchy, without
|
||||
+needing to directly name the static markers:
|
||||
+
|
||||
+.. code-block:: c
|
||||
+
|
||||
+ probe python.function.entry
|
||||
+ {
|
||||
+ printf("%s => %s in %s:%d\n",
|
||||
+ thread_indent(1), funcname, filename, lineno);
|
||||
+ }
|
||||
+
|
||||
+ probe python.function.return
|
||||
+ {
|
||||
+ printf("%s <= %s in %s:%d\n",
|
||||
+ thread_indent(-1), funcname, filename, lineno);
|
||||
+ }
|
||||
+
|
||||
+
|
||||
+The following script uses the tapset above to provide a top-like view of all
|
||||
+running CPython code, showing the top 20 most frequently-entered bytecode
|
||||
+frames, each second, across the whole system:
|
||||
+
|
||||
+.. code-block:: c
|
||||
+
|
||||
+ global fn_calls;
|
||||
+
|
||||
+ probe python.function.entry
|
||||
+ {
|
||||
+ fn_calls[pid(), filename, funcname, lineno] += 1;
|
||||
+ }
|
||||
+
|
||||
+ probe timer.ms(1000) {
|
||||
+ printf("\033[2J\033[1;1H") /* clear screen */
|
||||
+ printf("%6s %80s %6s %30s %6s\n",
|
||||
+ "PID", "FILENAME", "LINE", "FUNCTION", "CALLS")
|
||||
+ foreach ([pid, filename, funcname, lineno] in fn_calls- limit 20) {
|
||||
+ printf("%6d %80s %6d %30s %6d\n",
|
||||
+ pid, filename, lineno, funcname,
|
||||
+ fn_calls[pid, filename, funcname, lineno]);
|
||||
+ }
|
||||
+ delete fn_calls;
|
||||
+ }
|
||||
+
|
||||
diff -up Python-3.3.0rc2/Lib/test/test_systemtap.py.systemtap Python-3.3.0rc2/Lib/test/test_systemtap.py
|
||||
--- Python-3.3.0rc2/Lib/test/test_systemtap.py.systemtap 2012-09-10 09:17:21.117511779 -0400
|
||||
+++ Python-3.3.0rc2/Lib/test/test_systemtap.py 2012-09-10 09:17:21.117511779 -0400
|
||||
@@ -0,0 +1,234 @@
|
||||
+# Verify that systemtap static probes work
|
||||
+#
|
||||
+import subprocess
|
||||
+import sys
|
||||
+import sysconfig
|
||||
+import os
|
||||
+import unittest
|
||||
+
|
||||
+from test.support import run_unittest, TESTFN, unlink
|
||||
+
|
||||
+if '--with-systemtap' not in sysconfig.get_config_var('CONFIG_ARGS'):
|
||||
+ raise unittest.SkipTest("Python was not configured --with-systemtap")
|
||||
+
|
||||
+try:
|
||||
+ _, stap_version = subprocess.Popen(["stap", "-V"],
|
||||
+ stdout=subprocess.PIPE,
|
||||
+ stderr=subprocess.PIPE,
|
||||
+ ).communicate()
|
||||
+except OSError:
|
||||
+ # This is what "no stap" looks like. There may, however, be other
|
||||
+ # errors that manifest this way too.
|
||||
+ raise unittest.SkipTest("Couldn't find stap on the path")
|
||||
+
|
||||
+def invoke_systemtap_script(script, cmd):
|
||||
+ # Start a child process, probing with the given systemtap script
|
||||
+ # (passed as stdin to the "stap" tool)
|
||||
+ # The script should be a bytes instance
|
||||
+ # Return (stdout, stderr) pair
|
||||
+
|
||||
+ p = subprocess.Popen(["stap", "-", '-vv', '-c', cmd],
|
||||
+ stdin=subprocess.PIPE,
|
||||
+ stdout=subprocess.PIPE,
|
||||
+ stderr=subprocess.PIPE)
|
||||
+ out, err = p.communicate(input=script)
|
||||
+ return out, err
|
||||
+
|
||||
+# Verify that stap can run a simple "hello world"-style script
|
||||
+# This can fail for various reasons:
|
||||
+# - missing kernel headers
|
||||
+# - permissions (a non-root user needs to be in the "stapdev" group)
|
||||
+TRIVIAL_STAP_SCRIPT = b'probe begin { println("hello world") exit () }'
|
||||
+
|
||||
+out, err = invoke_systemtap_script(TRIVIAL_STAP_SCRIPT, 'true')
|
||||
+if out != b'hello world\n':
|
||||
+ raise unittest.SkipTest("Test systemtap script did not run; stderr was: %s" % err)
|
||||
+
|
||||
+# We don't expect stderr to be empty, since we're invoking stap with "-vv": stap
|
||||
+# will (we hope) generate debugging output on stderr.
|
||||
+
|
||||
+def invoke_python_under_systemtap(script, pythoncode=None, pythonfile=None):
|
||||
+ # Start a child python process, probing with the given systemtap script
|
||||
+ # (passed as stdin to the "stap" tool)
|
||||
+ # The script should be a bytes instance
|
||||
+ # Return (stdout, stderr) pair
|
||||
+
|
||||
+ if pythonfile:
|
||||
+ pythoncmd = '%s %s' % (sys.executable, pythonfile)
|
||||
+ else:
|
||||
+ pythoncmd = '%s -c %r' % (sys.executable, pythoncode)
|
||||
+
|
||||
+ # The process tree of a stap invocation of a command goes through
|
||||
+ # something like this:
|
||||
+ # stap ->fork/exec(staprun; exec stapio ->f/e(-c cmd); exec staprun -r)
|
||||
+ # and this trip through setuid leads to LD_LIBRARY_PATH being dropped,
|
||||
+ # which would lead to an --enable-shared build of python failing to be
|
||||
+ # find its libpython, with an error like:
|
||||
+ # error while loading shared libraries: libpython3.3dm.so.1.0: cannot
|
||||
+ # open shared object file: No such file or directory
|
||||
+ # Hence we need to jump through some hoops to expose LD_LIBRARY_PATH to
|
||||
+ # the invoked python process:
|
||||
+ LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '')
|
||||
+ if LD_LIBRARY_PATH:
|
||||
+ pythoncmd = 'env LD_LIBRARY_PATH=%s ' % LD_LIBRARY_PATH + pythoncmd
|
||||
+
|
||||
+ return invoke_systemtap_script(script, pythoncmd)
|
||||
+
|
||||
+# When using the static markers, we need to supply the prefix of a systemtap
|
||||
+# dotted probe point that containing the marker.
|
||||
+# See http://sourceware.org/systemtap/langref/Probe_points.html
|
||||
+#
|
||||
+# We need to determine if this is a shared-library build
|
||||
+#
|
||||
+# Note that sysconfig can get this wrong; see:
|
||||
+# http://bugs.python.org/issue14774
|
||||
+#
|
||||
+if '--enable-shared' in sysconfig.get_config_var('CONFIG_ARGS'):
|
||||
+ # For a shared-library build, the markers are in library(INSTSONAME):
|
||||
+ INSTSONAME = sysconfig.get_config_var('INSTSONAME')
|
||||
+ probe_prefix = 'process("%s").library("%s")' % (sys.executable, INSTSONAME)
|
||||
+else:
|
||||
+ # For a non-shared-library build, we can simply use sys.executable:
|
||||
+ probe_prefix = 'process("%s")' % sys.executable
|
||||
+
|
||||
+# The following script ought to generate lots of lines showing recursive
|
||||
+# function entry and return, of the form:
|
||||
+# 11408 python(8274): => __contains__ in Lib/_abcoll.py:362
|
||||
+# 11414 python(8274): => __getitem__ in Lib/os.py:425
|
||||
+# 11418 python(8274): => encode in Lib/os.py:490
|
||||
+# 11424 python(8274): <= encode in Lib/os.py:493
|
||||
+# 11428 python(8274): <= __getitem__ in Lib/os.py:426
|
||||
+# 11433 python(8274): <= __contains__ in Lib/_abcoll.py:366
|
||||
+# where the column are:
|
||||
+# - time in microseconds since start of script
|
||||
+# - name of executable
|
||||
+# - PID of process
|
||||
+# and the remainder indicates the call/return hierarchy
|
||||
+
|
||||
+hierarchy_script = ('''
|
||||
+probe %s.mark("function__entry") {
|
||||
+ filename = user_string($arg1);
|
||||
+ funcname = user_string($arg2);
|
||||
+ lineno = $arg3;
|
||||
+
|
||||
+ printf("%%s => %%s in %%s:%%d\\n", thread_indent(1), funcname, filename, lineno);
|
||||
+}
|
||||
+
|
||||
+probe %s.mark("function__return") {
|
||||
+ filename = user_string($arg1);
|
||||
+ funcname = user_string($arg2);
|
||||
+ lineno = $arg3;
|
||||
+
|
||||
+ printf("%%s <= %%s in %%s:%%d\\n", thread_indent(-1), funcname, filename, lineno);
|
||||
+}
|
||||
+''' % (probe_prefix, probe_prefix)).encode('utf-8')
|
||||
+
|
||||
+
|
||||
+class ErrorDumper:
|
||||
+ # A context manager that dumps extra information if an exception is raised,
|
||||
+ # to help track down why the problem occurred
|
||||
+ def __init__(self, out, err):
|
||||
+ self.out = out
|
||||
+ self.err = err
|
||||
+
|
||||
+ def __enter__(self):
|
||||
+ pass
|
||||
+
|
||||
+ def __exit__(self, type_, value, traceback):
|
||||
+ if type_:
|
||||
+ # an exception is being raised:
|
||||
+ print('stdout: %s' % out.decode())
|
||||
+ print('stderr: %s' % err.decode())
|
||||
+
|
||||
+class SystemtapTests(unittest.TestCase):
|
||||
+
|
||||
+ def test_invoking_python(self):
|
||||
+ # Ensure that we can invoke python under stap, with a trivial stap
|
||||
+ # script:
|
||||
+ out, err = invoke_python_under_systemtap(
|
||||
+ b'probe begin { println("hello from stap") exit () }',
|
||||
+ pythoncode="print('hello from python')")
|
||||
+ with ErrorDumper(out, err):
|
||||
+ self.assertIn(b'hello from stap', out)
|
||||
+ self.assertIn(b'hello from python', out)
|
||||
+
|
||||
+ def test_function_entry(self):
|
||||
+ # Ensure that the function_entry static marker works
|
||||
+ out, err = invoke_python_under_systemtap(hierarchy_script)
|
||||
+ # stdout ought to contain various lines showing recursive function
|
||||
+ # entry and return (see above)
|
||||
+
|
||||
+ # Uncomment this for debugging purposes:
|
||||
+ # print(out.decode('utf-8'))
|
||||
+
|
||||
+ # Executing the cmdline-supplied "pass":
|
||||
+ # 0 python(8274): => <module> in <string>:1
|
||||
+ # 5 python(8274): <= <module> in <string>:1
|
||||
+ with ErrorDumper(out, err):
|
||||
+ self.assertIn(b'=> <module> in <string>:1', out,
|
||||
+ msg="stdout: %s\nstderr: %s\n" % (out, err))
|
||||
+
|
||||
+ def test_function_encoding(self):
|
||||
+ # Ensure that function names containing non-Latin 1 code
|
||||
+ # points are handled:
|
||||
+ pythonfile = TESTFN
|
||||
+ try:
|
||||
+ unlink(pythonfile)
|
||||
+ f = open(pythonfile, "wb")
|
||||
+ f.write("""
|
||||
+# Sample script with non-ASCII filename, for use by test_systemtap.py
|
||||
+# Implicitly UTF-8
|
||||
+
|
||||
+def 文字化け():
|
||||
+ '''Function with non-ASCII identifier; I believe this reads "mojibake"'''
|
||||
+ print("hello world!")
|
||||
+
|
||||
+文字化け()
|
||||
+""".encode('utf-8'))
|
||||
+ f.close()
|
||||
+
|
||||
+ out, err = invoke_python_under_systemtap(hierarchy_script,
|
||||
+ pythonfile=pythonfile)
|
||||
+ out_utf8 = out.decode('utf-8')
|
||||
+ with ErrorDumper(out, err):
|
||||
+ self.assertIn('=> <module> in %s:5' % pythonfile, out_utf8)
|
||||
+ self.assertIn(' => 文字化け in %s:5' % pythonfile, out_utf8)
|
||||
+ self.assertIn(' <= 文字化け in %s:7' % pythonfile, out_utf8)
|
||||
+ self.assertIn('<= <module> in %s:9' % pythonfile, out_utf8)
|
||||
+ finally:
|
||||
+ unlink(pythonfile)
|
||||
+
|
||||
+ @unittest.skipIf(sys.getfilesystemencoding() == 'ascii',
|
||||
+ 'the test filename is not encodable with ASCII')
|
||||
+ def test_filename_encoding(self):
|
||||
+ # Ensure that scripts names containing non-Latin 1 code
|
||||
+ # points are handled:
|
||||
+ pythonfile = TESTFN + '_☠.py'
|
||||
+ try:
|
||||
+ unlink(pythonfile)
|
||||
+ f = open(pythonfile, "wb")
|
||||
+ f.write("""
|
||||
+def foo():
|
||||
+ '''Function with non-ASCII identifier; I believe this reads "mojibake"'''
|
||||
+ print("hello world!")
|
||||
+
|
||||
+foo()
|
||||
+""".encode('utf-8'))
|
||||
+ f.close()
|
||||
+
|
||||
+ out, err = invoke_python_under_systemtap(hierarchy_script,
|
||||
+ pythonfile=pythonfile)
|
||||
+ out_utf8 = out.decode('utf-8')
|
||||
+ with ErrorDumper(out, err):
|
||||
+ self.assertIn('=> <module> in %s:2' % pythonfile, out_utf8)
|
||||
+ self.assertIn(' => foo in %s:2' % pythonfile, out_utf8)
|
||||
+ self.assertIn(' <= foo in %s:4' % pythonfile, out_utf8)
|
||||
+ self.assertIn('<= <module> in %s:6' % pythonfile, out_utf8)
|
||||
+ finally:
|
||||
+ unlink(pythonfile)
|
||||
+
|
||||
+def test_main():
|
||||
+ run_unittest(SystemtapTests)
|
||||
+
|
||||
+if __name__ == "__main__":
|
||||
+ test_main()
|
||||
diff -up Python-3.3.0rc2/Makefile.pre.in.systemtap Python-3.3.0rc2/Makefile.pre.in
|
||||
--- Python-3.3.0rc2/Makefile.pre.in.systemtap 2012-09-09 05:11:05.000000000 -0400
|
||||
+++ Python-3.3.0rc2/Makefile.pre.in 2012-09-10 09:19:51.195501518 -0400
|
||||
@@ -363,6 +363,7 @@ PYTHON_OBJS= \
|
||||
Python/formatter_unicode.o \
|
||||
Python/fileutils.o \
|
||||
Python/$(DYNLOADFILE) \
|
||||
+ @SYSTEMTAPOBJS@ \
|
||||
$(LIBOBJS) \
|
||||
$(MACHDEP_OBJS) \
|
||||
$(THREADOBJ)
|
||||
@@ -713,7 +714,8 @@ Objects/setobject.o: $(srcdir)/Objects/s
|
||||
$(OPCODETARGETS_H): $(OPCODETARGETGEN_FILES)
|
||||
$(OPCODETARGETGEN) $(OPCODETARGETS_H)
|
||||
|
||||
-Python/ceval.o: $(OPCODETARGETS_H) $(srcdir)/Python/ceval_gil.h
|
||||
+Python/ceval.o: $(OPCODETARGETS_H) $(srcdir)/Python/ceval_gil.h \
|
||||
+ $(srcdir)/Python/ceval_systemtap.h @SYSTEMTAPDEPS@
|
||||
|
||||
Python/frozen.o: Python/importlib.h Python/importlib_external.h
|
||||
|
||||
@@ -724,6 +726,13 @@ Objects/typeobject.o: $(srcdir)/Objects/
|
||||
Objects/typeslots.inc: $(srcdir)/Include/typeslots.h $(srcdir)/Objects/typeslots.py
|
||||
$(PYTHON) $(srcdir)/Objects/typeslots.py < $(srcdir)/Include/typeslots.h > Objects/typeslots.inc
|
||||
|
||||
+# Only needed with --with-systemtap; not a public header:
|
||||
+$(srcdir)/Python/pysystemtap.h: $(srcdir)/Python/pysystemtap.d
|
||||
+ dtrace -o $@ $(DFLAGS) -C -h -s $(srcdir)/Python/pysystemtap.d
|
||||
+
|
||||
+Python/pysystemtap.o: $(srcdir)/Python/pysystemtap.d Python/ceval.o
|
||||
+ dtrace -o $@ $(DFLAGS) -C -G -s $(srcdir)/Python/pysystemtap.d Python/ceval.o
|
||||
+
|
||||
############################################################################
|
||||
# Header files
|
||||
|
||||
@@ -1345,6 +1354,7 @@ clean: pycremoval
|
||||
-rm -f Lib/lib2to3/*Grammar*.pickle
|
||||
-rm -f Programs/_testembed Programs/_freeze_importlib
|
||||
-rm -rf build
|
||||
+ -rm -f $(srcdir)/Python/pysystemtap.h
|
||||
|
||||
profile-removal:
|
||||
find . -name '*.gc??' -exec rm -f {} ';'
|
||||
diff -up Python-3.3.0rc2/pyconfig.h.in.systemtap Python-3.3.0rc2/pyconfig.h.in
|
||||
--- Python-3.3.0rc2/pyconfig.h.in.systemtap 2012-09-09 05:11:14.000000000 -0400
|
||||
+++ Python-3.3.0rc2/pyconfig.h.in 2012-09-10 09:17:21.120511781 -0400
|
||||
@@ -1306,6 +1306,9 @@
|
||||
/* Define if you want to compile in Python-specific mallocs */
|
||||
#undef WITH_PYMALLOC
|
||||
|
||||
+/* Define if you want to compile in SystemTap support */
|
||||
+#undef WITH_SYSTEMTAP
|
||||
+
|
||||
/* Define if you want to compile in rudimentary thread support */
|
||||
#undef WITH_THREAD
|
||||
|
||||
diff -up Python-3.3.0rc2/Python/ceval.c.systemtap Python-3.3.0rc2/Python/ceval.c
|
||||
--- Python-3.3.0rc2/Python/ceval.c.systemtap 2012-09-09 05:11:12.000000000 -0400
|
||||
+++ Python-3.3.0rc2/Python/ceval.c 2012-09-10 09:17:21.122511781 -0400
|
||||
@@ -18,6 +18,8 @@
|
||||
|
||||
#include <ctype.h>
|
||||
|
||||
+#include "ceval_systemtap.h"
|
||||
+
|
||||
#ifndef WITH_TSC
|
||||
|
||||
#define READ_TIMESTAMP(var)
|
||||
@@ -1160,6 +1162,10 @@ PyEval_EvalFrameEx(PyFrameObject *f, int
|
||||
}
|
||||
}
|
||||
|
||||
+ if (PYTHON_FUNCTION_ENTRY_ENABLED()) {
|
||||
+ systemtap_function_entry(f);
|
||||
+ }
|
||||
+
|
||||
co = f->f_code;
|
||||
names = co->co_names;
|
||||
consts = co->co_consts;
|
||||
@@ -3077,6 +3083,11 @@ fast_yield:
|
||||
|
||||
/* pop frame */
|
||||
exit_eval_frame:
|
||||
+
|
||||
+ if (PYTHON_FUNCTION_RETURN_ENABLED()) {
|
||||
+ systemtap_function_return(f);
|
||||
+ }
|
||||
+
|
||||
Py_LeaveRecursiveCall();
|
||||
f->f_executing = 0;
|
||||
tstate->frame = f->f_back;
|
||||
diff -up Python-3.3.0rc2/Python/ceval_systemtap.h.systemtap Python-3.3.0rc2/Python/ceval_systemtap.h
|
||||
--- Python-3.3.0rc2/Python/ceval_systemtap.h.systemtap 2012-09-10 09:17:21.122511781 -0400
|
||||
+++ Python-3.3.0rc2/Python/ceval_systemtap.h 2012-09-10 09:17:21.122511781 -0400
|
||||
@@ -0,0 +1,86 @@
|
||||
+/*
|
||||
+ Support for SystemTap static markers
|
||||
+*/
|
||||
+
|
||||
+#ifdef WITH_SYSTEMTAP
|
||||
+
|
||||
+#include "pysystemtap.h"
|
||||
+
|
||||
+/*
|
||||
+ A struct to hold all of the information gathered when one of the traceable
|
||||
+ markers is triggered
|
||||
+*/
|
||||
+struct frame_marker_info
|
||||
+{
|
||||
+ PyObject *filename_obj;
|
||||
+ PyObject *funcname_obj;
|
||||
+ const char *filename;
|
||||
+ const char *funcname;
|
||||
+ int lineno;
|
||||
+};
|
||||
+
|
||||
+static void
|
||||
+get_frame_marker_info(PyFrameObject *f, struct frame_marker_info *fmi)
|
||||
+{
|
||||
+ PyObject *ptype;
|
||||
+ PyObject *pvalue;
|
||||
+ PyObject *ptraceback;
|
||||
+
|
||||
+ PyErr_Fetch(&ptype, &pvalue, &ptraceback);
|
||||
+
|
||||
+ fmi->filename_obj = PyUnicode_EncodeFSDefault(f->f_code->co_filename);
|
||||
+ if (fmi->filename_obj) {
|
||||
+ fmi->filename = PyBytes_AsString(fmi->filename_obj);
|
||||
+ } else {
|
||||
+ fmi->filename = NULL;
|
||||
+ }
|
||||
+
|
||||
+ fmi->funcname_obj = PyUnicode_AsUTF8String(f->f_code->co_name);
|
||||
+ if (fmi->funcname_obj) {
|
||||
+ fmi->funcname = PyBytes_AsString(fmi->funcname_obj);
|
||||
+ } else {
|
||||
+ fmi->funcname = NULL;
|
||||
+ }
|
||||
+
|
||||
+ fmi->lineno = PyCode_Addr2Line(f->f_code, f->f_lasti);
|
||||
+
|
||||
+ PyErr_Restore(ptype, pvalue, ptraceback);
|
||||
+
|
||||
+}
|
||||
+
|
||||
+static void
|
||||
+release_frame_marker_info(struct frame_marker_info *fmi)
|
||||
+{
|
||||
+ Py_XDECREF(fmi->filename_obj);
|
||||
+ Py_XDECREF(fmi->funcname_obj);
|
||||
+}
|
||||
+
|
||||
+static void
|
||||
+systemtap_function_entry(PyFrameObject *f)
|
||||
+{
|
||||
+ struct frame_marker_info fmi;
|
||||
+ get_frame_marker_info(f, &fmi);
|
||||
+ PYTHON_FUNCTION_ENTRY(fmi.filename, fmi.funcname, fmi.lineno, f);
|
||||
+ release_frame_marker_info(&fmi);
|
||||
+}
|
||||
+
|
||||
+static void
|
||||
+systemtap_function_return(PyFrameObject *f)
|
||||
+{
|
||||
+ struct frame_marker_info fmi;
|
||||
+ get_frame_marker_info(f, &fmi);
|
||||
+ PYTHON_FUNCTION_RETURN(fmi.filename, fmi.funcname, fmi.lineno, f);
|
||||
+ release_frame_marker_info(&fmi);
|
||||
+}
|
||||
+
|
||||
+#else /* #ifdef WITH_SYSTEMTAP */
|
||||
+
|
||||
+/*
|
||||
+ When configured --without-systemtap, everything compiles away to nothing:
|
||||
+*/
|
||||
+#define PYTHON_FUNCTION_ENTRY_ENABLED() 0
|
||||
+#define PYTHON_FUNCTION_RETURN_ENABLED() 0
|
||||
+#define systemtap_function_entry(f)
|
||||
+#define systemtap_function_return(f)
|
||||
+
|
||||
+#endif
|
||||
diff -up Python-3.3.0rc2/Python/pysystemtap.d.systemtap Python-3.3.0rc2/Python/pysystemtap.d
|
||||
--- Python-3.3.0rc2/Python/pysystemtap.d.systemtap 2012-09-10 09:17:21.122511781 -0400
|
||||
+++ Python-3.3.0rc2/Python/pysystemtap.d 2012-09-10 09:17:21.122511781 -0400
|
||||
@@ -0,0 +1,4 @@
|
||||
+provider python {
|
||||
+ probe function__entry(const char *, const char *, int, PyFrameObject *);
|
||||
+ probe function__return(const char *, const char *, int, PyFrameObject *);
|
||||
+};
|
|
@ -0,0 +1,188 @@
|
|||
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
|
||||
index 9474e9c..c0ce4c6 100644
|
||||
--- a/Lib/distutils/command/install.py
|
||||
+++ b/Lib/distutils/command/install.py
|
||||
@@ -30,14 +30,14 @@ WINDOWS_SCHEME = {
|
||||
INSTALL_SCHEMES = {
|
||||
'unix_prefix': {
|
||||
'purelib': '$base/lib/python$py_version_short/site-packages',
|
||||
- 'platlib': '$platbase/lib/python$py_version_short/site-packages',
|
||||
+ 'platlib': '$platbase/lib64/python$py_version_short/site-packages',
|
||||
'headers': '$base/include/python$py_version_short$abiflags/$dist_name',
|
||||
'scripts': '$base/bin',
|
||||
'data' : '$base',
|
||||
},
|
||||
'unix_home': {
|
||||
'purelib': '$base/lib/python',
|
||||
- 'platlib': '$base/lib/python',
|
||||
+ 'platlib': '$base/lib64/python',
|
||||
'headers': '$base/include/python/$dist_name',
|
||||
'scripts': '$base/bin',
|
||||
'data' : '$base',
|
||||
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
|
||||
index 026cca7..6d3e077 100644
|
||||
--- a/Lib/distutils/sysconfig.py
|
||||
+++ b/Lib/distutils/sysconfig.py
|
||||
@@ -132,8 +132,12 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
|
||||
prefix = plat_specific and EXEC_PREFIX or PREFIX
|
||||
|
||||
if os.name == "posix":
|
||||
+ if plat_specific or standard_lib:
|
||||
+ lib = "lib64"
|
||||
+ else:
|
||||
+ lib = "lib"
|
||||
libpython = os.path.join(prefix,
|
||||
- "lib", "python" + get_python_version())
|
||||
+ lib, "python" + get_python_version())
|
||||
if standard_lib:
|
||||
return libpython
|
||||
else:
|
||||
diff --git a/Lib/site.py b/Lib/site.py
|
||||
index a84e3bb..ba0d3ea 100644
|
||||
--- a/Lib/site.py
|
||||
+++ b/Lib/site.py
|
||||
@@ -303,11 +303,15 @@ def getsitepackages(prefixes=None):
|
||||
seen.add(prefix)
|
||||
|
||||
if os.sep == '/':
|
||||
+ sitepackages.append(os.path.join(prefix, "lib64",
|
||||
+ "python" + sys.version[:3],
|
||||
+ "site-packages"))
|
||||
sitepackages.append(os.path.join(prefix, "lib",
|
||||
"python%d.%d" % sys.version_info[:2],
|
||||
"site-packages"))
|
||||
else:
|
||||
sitepackages.append(prefix)
|
||||
+ sitepackages.append(os.path.join(prefix, "lib64", "site-packages"))
|
||||
sitepackages.append(os.path.join(prefix, "lib", "site-packages"))
|
||||
if sys.platform == "darwin":
|
||||
# for framework builds *only* we add the standard Apple
|
||||
diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py
|
||||
index b9bbfe5..2a5f29c 100644
|
||||
--- a/Lib/sysconfig.py
|
||||
+++ b/Lib/sysconfig.py
|
||||
@@ -20,10 +20,10 @@ __all__ = [
|
||||
|
||||
_INSTALL_SCHEMES = {
|
||||
'posix_prefix': {
|
||||
- 'stdlib': '{installed_base}/lib/python{py_version_short}',
|
||||
- 'platstdlib': '{platbase}/lib/python{py_version_short}',
|
||||
+ 'stdlib': '{installed_base}/lib64/python{py_version_short}',
|
||||
+ 'platstdlib': '{platbase}/lib64/python{py_version_short}',
|
||||
'purelib': '{base}/lib/python{py_version_short}/site-packages',
|
||||
- 'platlib': '{platbase}/lib/python{py_version_short}/site-packages',
|
||||
+ 'platlib': '{platbase}/lib64/python{py_version_short}/site-packages',
|
||||
'include':
|
||||
'{installed_base}/include/python{py_version_short}{abiflags}',
|
||||
'platinclude':
|
||||
@@ -61,10 +61,10 @@ _INSTALL_SCHEMES = {
|
||||
'data': '{userbase}',
|
||||
},
|
||||
'posix_user': {
|
||||
- 'stdlib': '{userbase}/lib/python{py_version_short}',
|
||||
- 'platstdlib': '{userbase}/lib/python{py_version_short}',
|
||||
+ 'stdlib': '{userbase}/lib64/python{py_version_short}',
|
||||
+ 'platstdlib': '{userbase}/lib64/python{py_version_short}',
|
||||
'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
|
||||
- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages',
|
||||
+ 'platlib': '{userbase}/lib64/python{py_version_short}/site-packages',
|
||||
'include': '{userbase}/include/python{py_version_short}',
|
||||
'scripts': '{userbase}/bin',
|
||||
'data': '{userbase}',
|
||||
diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py
|
||||
index f698927..bc977b5 100644
|
||||
--- a/Lib/test/test_site.py
|
||||
+++ b/Lib/test/test_site.py
|
||||
@@ -248,8 +248,8 @@ class HelperFunctionsTests(unittest.TestCase):
|
||||
self.assertEqual(dirs[1], wanted)
|
||||
elif os.sep == '/':
|
||||
# OS X non-framework builds, Linux, FreeBSD, etc
|
||||
- self.assertEqual(len(dirs), 1)
|
||||
- wanted = os.path.join('xoxo', 'lib',
|
||||
+ self.assertEqual(len(dirs), 2)
|
||||
+ wanted = os.path.join('xoxo', 'lib64',
|
||||
'python%d.%d' % sys.version_info[:2],
|
||||
'site-packages')
|
||||
self.assertEqual(dirs[0], wanted)
|
||||
diff --git a/Makefile.pre.in b/Makefile.pre.in
|
||||
index 8fa7934..a693917 100644
|
||||
--- a/Makefile.pre.in
|
||||
+++ b/Makefile.pre.in
|
||||
@@ -126,7 +126,7 @@ LIBDIR= @libdir@
|
||||
MANDIR= @mandir@
|
||||
INCLUDEDIR= @includedir@
|
||||
CONFINCLUDEDIR= $(exec_prefix)/include
|
||||
-SCRIPTDIR= $(prefix)/lib
|
||||
+SCRIPTDIR= $(prefix)/lib64
|
||||
ABIFLAGS= @ABIFLAGS@
|
||||
|
||||
# Detailed destination directories
|
||||
diff --git a/Modules/getpath.c b/Modules/getpath.c
|
||||
index 65b47a3..eaa756c 100644
|
||||
--- a/Modules/getpath.c
|
||||
+++ b/Modules/getpath.c
|
||||
@@ -494,7 +494,7 @@ calculate_path(void)
|
||||
_pythonpath = Py_DecodeLocale(PYTHONPATH, NULL);
|
||||
_prefix = Py_DecodeLocale(PREFIX, NULL);
|
||||
_exec_prefix = Py_DecodeLocale(EXEC_PREFIX, NULL);
|
||||
- lib_python = Py_DecodeLocale("lib/python" VERSION, NULL);
|
||||
+ lib_python = Py_DecodeLocale("lib64/python" VERSION, NULL);
|
||||
|
||||
if (!_pythonpath || !_prefix || !_exec_prefix || !lib_python) {
|
||||
Py_FatalError(
|
||||
@@ -683,7 +683,7 @@ calculate_path(void)
|
||||
}
|
||||
else
|
||||
wcsncpy(zip_path, _prefix, MAXPATHLEN);
|
||||
- joinpath(zip_path, L"lib/python00.zip");
|
||||
+ joinpath(zip_path, L"lib64/python00.zip");
|
||||
bufsz = wcslen(zip_path); /* Replace "00" with version */
|
||||
zip_path[bufsz - 6] = VERSION[0];
|
||||
zip_path[bufsz - 5] = VERSION[2];
|
||||
@@ -695,7 +695,7 @@ calculate_path(void)
|
||||
fprintf(stderr,
|
||||
"Could not find platform dependent libraries <exec_prefix>\n");
|
||||
wcsncpy(exec_prefix, _exec_prefix, MAXPATHLEN);
|
||||
- joinpath(exec_prefix, L"lib/lib-dynload");
|
||||
+ joinpath(exec_prefix, L"lib64/lib-dynload");
|
||||
}
|
||||
/* If we found EXEC_PREFIX do *not* reduce it! (Yet.) */
|
||||
|
||||
diff --git a/setup.py b/setup.py
|
||||
index 0f2dfc4..da37896 100644
|
||||
--- a/setup.py
|
||||
+++ b/setup.py
|
||||
@@ -492,7 +492,7 @@ class PyBuildExt(build_ext):
|
||||
# directories (i.e. '.' and 'Include') must be first. See issue
|
||||
# 10520.
|
||||
if not cross_compiling:
|
||||
- add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
|
||||
+ add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib64')
|
||||
add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
|
||||
# only change this for cross builds for 3.3, issues on Mageia
|
||||
if cross_compiling:
|
||||
@@ -780,11 +780,11 @@ class PyBuildExt(build_ext):
|
||||
elif curses_library:
|
||||
readline_libs.append(curses_library)
|
||||
elif self.compiler.find_library_file(lib_dirs +
|
||||
- ['/usr/lib/termcap'],
|
||||
+ ['/usr/lib64/termcap'],
|
||||
'termcap'):
|
||||
readline_libs.append('termcap')
|
||||
exts.append( Extension('readline', ['readline.c'],
|
||||
- library_dirs=['/usr/lib/termcap'],
|
||||
+ library_dirs=['/usr/lib64/termcap'],
|
||||
extra_link_args=readline_extra_link_args,
|
||||
libraries=readline_libs) )
|
||||
else:
|
||||
@@ -821,8 +821,8 @@ class PyBuildExt(build_ext):
|
||||
if krb5_h:
|
||||
ssl_incs += krb5_h
|
||||
ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
|
||||
- ['/usr/local/ssl/lib',
|
||||
- '/usr/contrib/ssl/lib/'
|
||||
+ ['/usr/local/ssl/lib64',
|
||||
+ '/usr/contrib/ssl/lib64/'
|
||||
] )
|
||||
|
||||
if (ssl_incs is not None and
|
|
@ -0,0 +1,13 @@
|
|||
--- Python-2.7.2/Lib/distutils/tests/test_install.py.lib64 2011-09-08 17:51:57.851405376 -0400
|
||||
+++ Python-2.7.2/Lib/distutils/tests/test_install.py 2011-09-08 18:40:46.754205096 -0400
|
||||
@@ -41,8 +41,9 @@ class InstallTestCase(support.TempdirMan
|
||||
self.assertEqual(got, expected)
|
||||
|
||||
libdir = os.path.join(destination, "lib", "python")
|
||||
+ platlibdir = os.path.join(destination, "lib64", "python")
|
||||
check_path(cmd.install_lib, libdir)
|
||||
- check_path(cmd.install_platlib, libdir)
|
||||
+ check_path(cmd.install_platlib, platlibdir)
|
||||
check_path(cmd.install_purelib, libdir)
|
||||
check_path(cmd.install_headers,
|
||||
os.path.join(destination, "include", "python", "foopkg"))
|
|
@ -0,0 +1,60 @@
|
|||
diff --git a/Makefile.pre.in b/Makefile.pre.in
|
||||
index 4b093e3..1088435 100644
|
||||
--- a/Makefile.pre.in
|
||||
+++ b/Makefile.pre.in
|
||||
@@ -543,7 +543,7 @@ clinic: check-clean-src $(srcdir)/Modules/_blake2/blake2s_impl.c
|
||||
$(PYTHON_FOR_REGEN) ./Tools/clinic/clinic.py --make
|
||||
|
||||
# Build the interpreter
|
||||
-$(BUILDPYTHON): Programs/python.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY)
|
||||
+$(BUILDPYTHON): Programs/python.o $(LDLIBRARY) $(PY3LIBRARY)
|
||||
$(LINKCC) $(PY_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/python.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST)
|
||||
|
||||
platform: $(BUILDPYTHON) pybuilddir.txt
|
||||
@@ -588,18 +588,6 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o
|
||||
$(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build
|
||||
|
||||
|
||||
-# Build static library
|
||||
-# avoid long command lines, same as LIBRARY_OBJS
|
||||
-$(LIBRARY): $(LIBRARY_OBJS)
|
||||
- -rm -f $@
|
||||
- $(AR) $(ARFLAGS) $@ Modules/getbuildinfo.o
|
||||
- $(AR) $(ARFLAGS) $@ $(PARSER_OBJS)
|
||||
- $(AR) $(ARFLAGS) $@ $(OBJECT_OBJS)
|
||||
- $(AR) $(ARFLAGS) $@ $(PYTHON_OBJS) Python/frozen.o
|
||||
- $(AR) $(ARFLAGS) $@ $(MODULE_OBJS)
|
||||
- $(AR) $(ARFLAGS) $@ $(MODOBJS)
|
||||
- $(RANLIB) $@
|
||||
-
|
||||
libpython$(LDVERSION).so: $(LIBRARY_OBJS)
|
||||
if test $(INSTSONAME) != $(LDLIBRARY); then \
|
||||
$(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \
|
||||
@@ -689,7 +677,7 @@ Modules/Setup: $(srcdir)/Modules/Setup.dist
|
||||
echo "-----------------------------------------------"; \
|
||||
fi
|
||||
|
||||
-Programs/_testembed: Programs/_testembed.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY)
|
||||
+Programs/_testembed: Programs/_testembed.o $(LDLIBRARY) $(PY3LIBRARY)
|
||||
$(LINKCC) $(PY_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/_testembed.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST)
|
||||
|
||||
############################################################################
|
||||
@@ -1425,18 +1413,6 @@ libainstall: @DEF_MAKE_RULE@ python-config
|
||||
else true; \
|
||||
fi; \
|
||||
done
|
||||
- @if test -d $(LIBRARY); then :; else \
|
||||
- if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \
|
||||
- if test "$(SHLIB_SUFFIX)" = .dll; then \
|
||||
- $(INSTALL_DATA) $(LDLIBRARY) $(DESTDIR)$(LIBPL) ; \
|
||||
- else \
|
||||
- $(INSTALL_DATA) $(LIBRARY) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
|
||||
- $(RANLIB) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \
|
||||
- fi; \
|
||||
- else \
|
||||
- echo Skip install of $(LIBRARY) - use make frameworkinstall; \
|
||||
- fi; \
|
||||
- fi
|
||||
$(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c
|
||||
$(INSTALL_DATA) Programs/python.o $(DESTDIR)$(LIBPL)/python.o
|
||||
$(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in
|
|
@ -0,0 +1,46 @@
|
|||
diff -up Python-3.2.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest Python-3.2.2/Lib/unittest/case.py
|
||||
--- Python-3.2.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest 2011-09-03 12:16:44.000000000 -0400
|
||||
+++ Python-3.2.2/Lib/unittest/case.py 2011-09-09 06:35:16.365568382 -0400
|
||||
@@ -3,6 +3,7 @@
|
||||
import sys
|
||||
import functools
|
||||
import difflib
|
||||
+import os
|
||||
import logging
|
||||
import pprint
|
||||
import re
|
||||
@@ -101,5 +102,21 @@ def expectedFailure(func):
|
||||
raise self.test_case.failureException(msg)
|
||||
|
||||
+# Non-standard/downstream-only hooks for handling issues with specific test
|
||||
+# cases:
|
||||
+
|
||||
+def _skipInRpmBuild(reason):
|
||||
+ """
|
||||
+ Non-standard/downstream-only decorator for marking a specific unit test
|
||||
+ to be skipped when run within the %check of an rpmbuild.
|
||||
+
|
||||
+ Specifically, this takes effect when WITHIN_PYTHON_RPM_BUILD is set within
|
||||
+ the environment, and has no effect otherwise.
|
||||
+ """
|
||||
+ if 'WITHIN_PYTHON_RPM_BUILD' in os.environ:
|
||||
+ return skip(reason)
|
||||
+ else:
|
||||
+ return _id
|
||||
+
|
||||
class _AssertRaisesBaseContext(_BaseTestCaseContext):
|
||||
|
||||
def __init__(self, expected, test_case, expected_regex=None):
|
||||
diff -up Python-3.2.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest Python-3.2.2/Lib/unittest/__init__.py
|
||||
--- Python-3.2.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest 2011-09-03 12:16:44.000000000 -0400
|
||||
+++ Python-3.2.2/Lib/unittest/__init__.py 2011-09-09 06:35:16.366568382 -0400
|
||||
@@ -57,7 +57,8 @@ __unittest = True
|
||||
|
||||
from .result import TestResult
|
||||
from .case import (TestCase, FunctionTestCase, SkipTest, skip, skipIf,
|
||||
- skipUnless, expectedFailure)
|
||||
+ skipUnless, expectedFailure,
|
||||
+ _skipInRpmBuild)
|
||||
from .suite import BaseTestSuite, TestSuite
|
||||
from .loader import (TestLoader, defaultTestLoader, makeSuite, getTestCaseNames,
|
||||
findTestCases)
|
|
@ -0,0 +1,12 @@
|
|||
diff -up Python-3.2.2/Lib/distutils/tests/test_bdist_rpm.py.skip-distutils-tests-that-fail-in-rpmbuild Python-3.2.2/Lib/distutils/tests/test_bdist_rpm.py
|
||||
--- Python-3.2.2/Lib/distutils/tests/test_bdist_rpm.py.skip-distutils-tests-that-fail-in-rpmbuild 2011-09-03 12:16:40.000000000 -0400
|
||||
+++ Python-3.2.2/Lib/distutils/tests/test_bdist_rpm.py 2011-09-10 05:04:56.328852558 -0400
|
||||
@@ -23,6 +23,7 @@ setup(name='foo', version='0.1', py_modu
|
||||
|
||||
"""
|
||||
|
||||
+@unittest._skipInRpmBuild("don't try to nest one rpm build inside another rpm build")
|
||||
class BuildRpmTestCase(support.TempdirManager,
|
||||
support.EnvironGuard,
|
||||
support.LoggingSilencer,
|
||||
diff -up Python-3.2.2/Lib/distutils/tests/test_build_ext.py.skip-distutils-tests-that-fail-in-rpmbuild Python-3.2.2/Lib/distutils/tests/test_build_ext.py
|
|
@ -0,0 +1,640 @@
|
|||
diff --git a/Lib/hashlib.py b/Lib/hashlib.py
|
||||
index 316cece..b7ad879 100644
|
||||
--- a/Lib/hashlib.py
|
||||
+++ b/Lib/hashlib.py
|
||||
@@ -23,6 +23,16 @@ the zlib module.
|
||||
Choose your hash function wisely. Some have known collision weaknesses.
|
||||
sha384 and sha512 will be slow on 32 bit platforms.
|
||||
|
||||
+If the underlying implementation supports "FIPS mode", and this is enabled, it
|
||||
+may restrict the available hashes to only those that are compliant with FIPS
|
||||
+regulations. For example, it may deny the use of MD5, on the grounds that this
|
||||
+is not secure for uses such as authentication, system integrity checking, or
|
||||
+digital signatures. If you need to use such a hash for non-security purposes
|
||||
+(such as indexing into a data structure for speed), you can override the keyword
|
||||
+argument "usedforsecurity" from True to False to signify that your code is not
|
||||
+relying on the hash for security purposes, and this will allow the hash to be
|
||||
+usable even in FIPS mode.
|
||||
+
|
||||
Hash objects have these methods:
|
||||
- update(arg): Update the hash object with the bytes in arg. Repeated calls
|
||||
are equivalent to a single call with the concatenation of all
|
||||
@@ -62,6 +72,18 @@ algorithms_available = set(__always_supported)
|
||||
__all__ = __always_supported + ('new', 'algorithms_guaranteed',
|
||||
'algorithms_available', 'pbkdf2_hmac')
|
||||
|
||||
+import functools
|
||||
+def __ignore_usedforsecurity(func):
|
||||
+ """Used for sha3_* functions. Until OpenSSL implements them, we want
|
||||
+ to use them from Python _sha3 module, but we want them to accept
|
||||
+ usedforsecurity argument too."""
|
||||
+ # TODO: remove this function when OpenSSL implements sha3
|
||||
+ @functools.wraps(func)
|
||||
+ def inner(*args, **kwargs):
|
||||
+ if 'usedforsecurity' in kwargs:
|
||||
+ kwargs.pop('usedforsecurity')
|
||||
+ return func(*args, **kwargs)
|
||||
+ return inner
|
||||
|
||||
__builtin_constructor_cache = {}
|
||||
|
||||
@@ -100,31 +122,39 @@ def __get_openssl_constructor(name):
|
||||
f = getattr(_hashlib, 'openssl_' + name)
|
||||
# Allow the C module to raise ValueError. The function will be
|
||||
# defined but the hash not actually available thanks to OpenSSL.
|
||||
- f()
|
||||
+ # We pass "usedforsecurity=False" to disable FIPS-based restrictions:
|
||||
+ # at this stage we're merely seeing if the function is callable,
|
||||
+ # rather than using it for actual work.
|
||||
+ f(usedforsecurity=False)
|
||||
# Use the C function directly (very fast)
|
||||
return f
|
||||
except (AttributeError, ValueError):
|
||||
+ # TODO: We want to just raise here when OpenSSL implements sha3
|
||||
+ # because we want to make sure that Fedora uses everything from OpenSSL
|
||||
return __get_builtin_constructor(name)
|
||||
|
||||
|
||||
-def __py_new(name, data=b''):
|
||||
- """new(name, data=b'') - Return a new hashing object using the named algorithm;
|
||||
- optionally initialized with data (which must be bytes).
|
||||
+def __py_new(name, data=b'', usedforsecurity=True):
|
||||
+ """new(name, data=b'', usedforsecurity=True) - Return a new hashing object using
|
||||
+ the named algorithm; optionally initialized with data (which must be bytes).
|
||||
+ The 'usedforsecurity' keyword argument does nothing, and is for compatibilty
|
||||
+ with the OpenSSL implementation
|
||||
"""
|
||||
return __get_builtin_constructor(name)(data)
|
||||
|
||||
|
||||
-def __hash_new(name, data=b''):
|
||||
- """new(name, data=b'') - Return a new hashing object using the named algorithm;
|
||||
- optionally initialized with data (which must be bytes).
|
||||
+def __hash_new(name, data=b'', usedforsecurity=True):
|
||||
+ """new(name, data=b'', usedforsecurity=True) - Return a new hashing object using
|
||||
+ the named algorithm; optionally initialized with data (which must be bytes).
|
||||
+
|
||||
+ Override 'usedforsecurity' to False when using for non-security purposes in
|
||||
+ a FIPS environment
|
||||
"""
|
||||
try:
|
||||
- return _hashlib.new(name, data)
|
||||
+ return _hashlib.new(name, data, usedforsecurity)
|
||||
except ValueError:
|
||||
- # If the _hashlib module (OpenSSL) doesn't support the named
|
||||
- # hash, try using our builtin implementations.
|
||||
- # This allows for SHA224/256 and SHA384/512 support even though
|
||||
- # the OpenSSL library prior to 0.9.8 doesn't provide them.
|
||||
+ # TODO: We want to just raise here when OpenSSL implements sha3
|
||||
+ # because we want to make sure that Fedora uses everything from OpenSSL
|
||||
return __get_builtin_constructor(name)(data)
|
||||
|
||||
|
||||
@@ -207,7 +237,10 @@ for __func_name in __always_supported:
|
||||
# try them all, some may not work due to the OpenSSL
|
||||
# version not supporting that algorithm.
|
||||
try:
|
||||
- globals()[__func_name] = __get_hash(__func_name)
|
||||
+ func = __get_hash(__func_name)
|
||||
+ if 'sha3_' in __func_name:
|
||||
+ func = __ignore_usedforsecurity(func)
|
||||
+ globals()[__func_name] = func
|
||||
except ValueError:
|
||||
import logging
|
||||
logging.exception('code for hash %s was not found.', __func_name)
|
||||
@@ -215,3 +248,4 @@ for __func_name in __always_supported:
|
||||
# Cleanup locals()
|
||||
del __always_supported, __func_name, __get_hash
|
||||
del __py_new, __hash_new, __get_openssl_constructor
|
||||
+del __ignore_usedforsecurity
|
||||
\ No newline at end of file
|
||||
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
|
||||
index c9b113e..60e2392 100644
|
||||
--- a/Lib/test/test_hashlib.py
|
||||
+++ b/Lib/test/test_hashlib.py
|
||||
@@ -24,7 +24,22 @@ from test.support import _4G, bigmemtest, import_fresh_module
|
||||
COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount')
|
||||
|
||||
c_hashlib = import_fresh_module('hashlib', fresh=['_hashlib'])
|
||||
-py_hashlib = import_fresh_module('hashlib', blocked=['_hashlib'])
|
||||
+# skipped on Fedora, since we always use OpenSSL implementation
|
||||
+# py_hashlib = import_fresh_module('hashlib', blocked=['_hashlib'])
|
||||
+
|
||||
+def openssl_enforces_fips():
|
||||
+ # Use the "openssl" command (if present) to try to determine if the local
|
||||
+ # OpenSSL is configured to enforce FIPS
|
||||
+ from subprocess import Popen, PIPE
|
||||
+ try:
|
||||
+ p = Popen(['openssl', 'md5'],
|
||||
+ stdin=PIPE, stdout=PIPE, stderr=PIPE)
|
||||
+ except OSError:
|
||||
+ # "openssl" command not found
|
||||
+ return False
|
||||
+ stdout, stderr = p.communicate(input=b'abc')
|
||||
+ return b'unknown cipher' in stderr
|
||||
+OPENSSL_ENFORCES_FIPS = openssl_enforces_fips()
|
||||
|
||||
def hexstr(s):
|
||||
assert isinstance(s, bytes), repr(s)
|
||||
@@ -34,6 +49,16 @@ def hexstr(s):
|
||||
r += h[(i >> 4) & 0xF] + h[i & 0xF]
|
||||
return r
|
||||
|
||||
+# hashlib and _hashlib-based functions support a "usedforsecurity" keyword
|
||||
+# argument, and FIPS mode requires that it be used overridden with a False
|
||||
+# value for these selftests to work. Other cryptographic code within Python
|
||||
+# doesn't support this keyword.
|
||||
+# Modify a function to one in which "usedforsecurity=False" is added to the
|
||||
+# keyword arguments:
|
||||
+def suppress_fips(f):
|
||||
+ def g(*args, **kwargs):
|
||||
+ return f(*args, usedforsecurity=False, **kwargs)
|
||||
+ return g
|
||||
|
||||
class HashLibTestCase(unittest.TestCase):
|
||||
supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1',
|
||||
@@ -63,11 +88,11 @@ class HashLibTestCase(unittest.TestCase):
|
||||
# For each algorithm, test the direct constructor and the use
|
||||
# of hashlib.new given the algorithm name.
|
||||
for algorithm, constructors in self.constructors_to_test.items():
|
||||
- constructors.add(getattr(hashlib, algorithm))
|
||||
+ constructors.add(suppress_fips(getattr(hashlib, algorithm)))
|
||||
def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm):
|
||||
if data is None:
|
||||
- return hashlib.new(_alg)
|
||||
- return hashlib.new(_alg, data)
|
||||
+ return suppress_fips(hashlib.new)(_alg)
|
||||
+ return suppress_fips(hashlib.new)(_alg, data)
|
||||
constructors.add(_test_algorithm_via_hashlib_new)
|
||||
|
||||
_hashlib = self._conditional_import_module('_hashlib')
|
||||
@@ -79,27 +104,12 @@ class HashLibTestCase(unittest.TestCase):
|
||||
for algorithm, constructors in self.constructors_to_test.items():
|
||||
constructor = getattr(_hashlib, 'openssl_'+algorithm, None)
|
||||
if constructor:
|
||||
- constructors.add(constructor)
|
||||
+ constructors.add(suppress_fips(constructor))
|
||||
|
||||
def add_builtin_constructor(name):
|
||||
constructor = getattr(hashlib, "__get_builtin_constructor")(name)
|
||||
self.constructors_to_test[name].add(constructor)
|
||||
|
||||
- _md5 = self._conditional_import_module('_md5')
|
||||
- if _md5:
|
||||
- add_builtin_constructor('md5')
|
||||
- _sha1 = self._conditional_import_module('_sha1')
|
||||
- if _sha1:
|
||||
- add_builtin_constructor('sha1')
|
||||
- _sha256 = self._conditional_import_module('_sha256')
|
||||
- if _sha256:
|
||||
- add_builtin_constructor('sha224')
|
||||
- add_builtin_constructor('sha256')
|
||||
- _sha512 = self._conditional_import_module('_sha512')
|
||||
- if _sha512:
|
||||
- add_builtin_constructor('sha384')
|
||||
- add_builtin_constructor('sha512')
|
||||
-
|
||||
super(HashLibTestCase, self).__init__(*args, **kwargs)
|
||||
|
||||
@property
|
||||
@@ -148,9 +158,6 @@ class HashLibTestCase(unittest.TestCase):
|
||||
else:
|
||||
del sys.modules['_md5']
|
||||
self.assertRaises(TypeError, get_builtin_constructor, 3)
|
||||
- constructor = get_builtin_constructor('md5')
|
||||
- self.assertIs(constructor, _md5.md5)
|
||||
- self.assertEqual(sorted(builtin_constructor_cache), ['MD5', 'md5'])
|
||||
|
||||
def test_hexdigest(self):
|
||||
for cons in self.hash_constructors:
|
||||
@@ -433,6 +440,64 @@ class HashLibTestCase(unittest.TestCase):
|
||||
|
||||
self.assertEqual(expected_hash, hasher.hexdigest())
|
||||
|
||||
+ def test_issue9146(self):
|
||||
+ # Ensure that various ways to use "MD5" from "hashlib" don't segfault:
|
||||
+ m = hashlib.md5(usedforsecurity=False)
|
||||
+ m.update(b'abc\n')
|
||||
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
|
||||
+
|
||||
+ m = hashlib.new('md5', usedforsecurity=False)
|
||||
+ m.update(b'abc\n')
|
||||
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
|
||||
+
|
||||
+ m = hashlib.md5(b'abc\n', usedforsecurity=False)
|
||||
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
|
||||
+
|
||||
+ m = hashlib.new('md5', b'abc\n', usedforsecurity=False)
|
||||
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
|
||||
+
|
||||
+ @unittest.skipUnless(OPENSSL_ENFORCES_FIPS,
|
||||
+ 'FIPS enforcement required for this test.')
|
||||
+ def test_hashlib_fips_mode(self):
|
||||
+ # Ensure that we raise a ValueError on vanilla attempts to use MD5
|
||||
+ # in hashlib in a FIPS-enforced setting:
|
||||
+ with self.assertRaisesRegexp(ValueError, '.*unknown cipher'):
|
||||
+ m = hashlib.md5()
|
||||
+
|
||||
+ if not self._conditional_import_module('_md5'):
|
||||
+ with self.assertRaisesRegexp(ValueError, '.*unknown cipher'):
|
||||
+ m = hashlib.new('md5')
|
||||
+
|
||||
+ @unittest.skipUnless(OPENSSL_ENFORCES_FIPS,
|
||||
+ 'FIPS enforcement required for this test.')
|
||||
+ def test_hashopenssl_fips_mode(self):
|
||||
+ # Verify the _hashlib module's handling of md5:
|
||||
+ _hashlib = self._conditional_import_module('_hashlib')
|
||||
+ if _hashlib:
|
||||
+ assert hasattr(_hashlib, 'openssl_md5')
|
||||
+
|
||||
+ # Ensure that _hashlib raises a ValueError on vanilla attempts to
|
||||
+ # use MD5 in a FIPS-enforced setting:
|
||||
+ with self.assertRaisesRegexp(ValueError, '.*unknown cipher'):
|
||||
+ m = _hashlib.openssl_md5()
|
||||
+ with self.assertRaisesRegexp(ValueError, '.*unknown cipher'):
|
||||
+ m = _hashlib.new('md5')
|
||||
+
|
||||
+ # Ensure that in such a setting we can whitelist a callsite with
|
||||
+ # usedforsecurity=False and have it succeed:
|
||||
+ m = _hashlib.openssl_md5(usedforsecurity=False)
|
||||
+ m.update(b'abc\n')
|
||||
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
|
||||
+
|
||||
+ m = _hashlib.new('md5', usedforsecurity=False)
|
||||
+ m.update(b'abc\n')
|
||||
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
|
||||
+
|
||||
+ m = _hashlib.openssl_md5(b'abc\n', usedforsecurity=False)
|
||||
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
|
||||
+
|
||||
+ m = _hashlib.new('md5', b'abc\n', usedforsecurity=False)
|
||||
+ self.assertEquals(m.hexdigest(), "0bee89b07a248e27c83fc3d5951213c1")
|
||||
|
||||
class KDFTests(unittest.TestCase):
|
||||
|
||||
@@ -516,7 +581,7 @@ class KDFTests(unittest.TestCase):
|
||||
out = pbkdf2(hash_name='sha1', password=b'password', salt=b'salt',
|
||||
iterations=1, dklen=None)
|
||||
self.assertEqual(out, self.pbkdf2_results['sha1'][0][0])
|
||||
-
|
||||
+ @unittest.skip('skipped on Fedora, as we always use OpenSSL pbkdf2_hmac')
|
||||
def test_pbkdf2_hmac_py(self):
|
||||
self._test_pbkdf2_hmac(py_hashlib.pbkdf2_hmac)
|
||||
|
||||
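A minimal sketch of what the test additions above exercise, assuming a build that carries this patch (so hashlib accepts the usedforsecurity keyword):

    import hashlib

    # Non-security use (e.g. content fingerprinting): the flag whitelists the
    # callsite, so this works even where OpenSSL enforces FIPS.
    m = hashlib.md5(b'abc\n', usedforsecurity=False)
    print(m.hexdigest())  # 0bee89b07a248e27c83fc3d5951213c1

    # Without the flag, a FIPS-enforcing OpenSSL makes the plain call raise
    # ValueError ('unknown cipher'):
    # hashlib.md5()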
diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c
|
||||
index 44765ac..b8cf490 100644
|
||||
--- a/Modules/_hashopenssl.c
|
||||
+++ b/Modules/_hashopenssl.c
|
||||
@@ -20,6 +20,8 @@
|
||||
|
||||
|
||||
/* EVP is the preferred interface to hashing in OpenSSL */
|
||||
+#include <openssl/ssl.h>
|
||||
+#include <openssl/err.h>
|
||||
#include <openssl/evp.h>
|
||||
#include <openssl/hmac.h>
|
||||
/* We use the object interface to discover what hashes OpenSSL supports. */
|
||||
@@ -45,11 +47,19 @@ typedef struct {
|
||||
|
||||
static PyTypeObject EVPtype;
|
||||
|
||||
+/* Struct to hold all the cached information we need on a specific algorithm.
|
||||
+ We have one of these per algorithm */
|
||||
+typedef struct {
|
||||
+ PyObject *name_obj;
|
||||
+ EVP_MD_CTX ctxs[2];
|
||||
+ /* ctx_ptrs will point to ctxs unless an error occurred, when it will
|
||||
+ be NULL: */
|
||||
+ EVP_MD_CTX *ctx_ptrs[2];
|
||||
+ PyObject *error_msgs[2];
|
||||
+} EVPCachedInfo;
|
||||
|
||||
-#define DEFINE_CONSTS_FOR_NEW(Name) \
|
||||
- static PyObject *CONST_ ## Name ## _name_obj = NULL; \
|
||||
- static EVP_MD_CTX CONST_new_ ## Name ## _ctx; \
|
||||
- static EVP_MD_CTX *CONST_new_ ## Name ## _ctx_p = NULL;
|
||||
+#define DEFINE_CONSTS_FOR_NEW(Name) \
|
||||
+ static EVPCachedInfo cached_info_ ##Name;
|
||||
|
||||
DEFINE_CONSTS_FOR_NEW(md5)
|
||||
DEFINE_CONSTS_FOR_NEW(sha1)
|
||||
@@ -92,6 +102,48 @@ EVP_hash(EVPobject *self, const void *vp, Py_ssize_t len)
|
||||
}
|
||||
}
|
||||
|
||||
+static void
|
||||
+mc_ctx_init(EVP_MD_CTX *ctx, int usedforsecurity)
|
||||
+{
|
||||
+ EVP_MD_CTX_init(ctx);
|
||||
+
|
||||
+ /*
|
||||
+ If the user has declared that this digest is being used in a
|
||||
+ non-security role (e.g. indexing into a data structure), set
|
||||
+ the exception flag for openssl to allow it
|
||||
+ */
|
||||
+ if (!usedforsecurity) {
|
||||
+#ifdef EVP_MD_CTX_FLAG_NON_FIPS_ALLOW
|
||||
+ EVP_MD_CTX_set_flags(ctx,
|
||||
+ EVP_MD_CTX_FLAG_NON_FIPS_ALLOW);
|
||||
+#endif
|
||||
+ }
|
||||
+}
|
||||
+
|
||||
+/* Get an error msg for the last error as a PyObject */
|
||||
+static PyObject *
|
||||
+error_msg_for_last_error(void)
|
||||
+{
|
||||
+ char *errstr;
|
||||
+
|
||||
+ errstr = ERR_error_string(ERR_peek_last_error(), NULL);
|
||||
+ ERR_clear_error();
|
||||
+
|
||||
+ return PyUnicode_FromString(errstr); /* Can be NULL */
|
||||
+}
|
||||
+
|
||||
+static void
|
||||
+set_evp_exception(void)
|
||||
+{
|
||||
+ char *errstr;
|
||||
+
|
||||
+ errstr = ERR_error_string(ERR_peek_last_error(), NULL);
|
||||
+ ERR_clear_error();
|
||||
+
|
||||
+ PyErr_SetString(PyExc_ValueError, errstr);
|
||||
+}
|
||||
+
|
||||
+
|
||||
/* Internal methods for a hash object */
|
||||
|
||||
static void
|
||||
@@ -259,15 +311,16 @@ EVP_repr(EVPobject *self)
|
||||
static int
|
||||
EVP_tp_init(EVPobject *self, PyObject *args, PyObject *kwds)
|
||||
{
|
||||
- static char *kwlist[] = {"name", "string", NULL};
|
||||
+ static char *kwlist[] = {"name", "string", "usedforsecurity", NULL};
|
||||
PyObject *name_obj = NULL;
|
||||
PyObject *data_obj = NULL;
|
||||
+ int usedforsecurity = 1;
|
||||
Py_buffer view;
|
||||
char *nameStr;
|
||||
const EVP_MD *digest;
|
||||
|
||||
- if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|O:HASH", kwlist,
|
||||
- &name_obj, &data_obj)) {
|
||||
+ if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|Oi:HASH", kwlist,
|
||||
+ &name_obj, &data_obj, &usedforsecurity)) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
@@ -288,7 +341,12 @@ EVP_tp_init(EVPobject *self, PyObject *args, PyObject *kwds)
|
||||
PyBuffer_Release(&view);
|
||||
return -1;
|
||||
}
|
||||
- EVP_DigestInit(&self->ctx, digest);
|
||||
+ mc_ctx_init(&self->ctx, usedforsecurity);
|
||||
+ if (!EVP_DigestInit_ex(&self->ctx, digest, NULL)) {
|
||||
+ set_evp_exception();
|
||||
+ PyBuffer_Release(&view);
|
||||
+ return -1;
|
||||
+ }
|
||||
|
||||
self->name = name_obj;
|
||||
Py_INCREF(self->name);
|
||||
@@ -372,7 +430,8 @@ static PyTypeObject EVPtype = {
|
||||
static PyObject *
|
||||
EVPnew(PyObject *name_obj,
|
||||
const EVP_MD *digest, const EVP_MD_CTX *initial_ctx,
|
||||
- const unsigned char *cp, Py_ssize_t len)
|
||||
+ const unsigned char *cp, Py_ssize_t len,
|
||||
+ int usedforsecurity)
|
||||
{
|
||||
EVPobject *self;
|
||||
|
||||
@@ -387,7 +446,12 @@ EVPnew(PyObject *name_obj,
|
||||
if (initial_ctx) {
|
||||
EVP_MD_CTX_copy(&self->ctx, initial_ctx);
|
||||
} else {
|
||||
- EVP_DigestInit(&self->ctx, digest);
|
||||
+ mc_ctx_init(&self->ctx, usedforsecurity);
|
||||
+ if (!EVP_DigestInit_ex(&self->ctx, digest, NULL)) {
|
||||
+ set_evp_exception();
|
||||
+ Py_DECREF(self);
|
||||
+ return NULL;
|
||||
+ }
|
||||
}
|
||||
|
||||
if (cp && len) {
|
||||
@@ -411,21 +475,29 @@ PyDoc_STRVAR(EVP_new__doc__,
|
||||
An optional string argument may be provided and will be\n\
|
||||
automatically hashed.\n\
|
||||
\n\
|
||||
-The MD5 and SHA1 algorithms are always supported.\n");
|
||||
+The MD5 and SHA1 algorithms are always supported.\n\
|
||||
+\n\
|
||||
+An optional \"usedforsecurity=True\" keyword argument is provided for use in\n\
|
||||
+environments that enforce FIPS-based restrictions. Some implementations of\n\
|
||||
+OpenSSL can be configured to prevent the usage of non-secure algorithms (such\n\
|
||||
+as MD5). If you have a non-security use for these algorithms (e.g. a hash\n\
|
||||
+table), you can override this argument by marking the callsite as\n\
|
||||
+\"usedforsecurity=False\".");
|
||||
|
||||
static PyObject *
|
||||
EVP_new(PyObject *self, PyObject *args, PyObject *kwdict)
|
||||
{
|
||||
- static char *kwlist[] = {"name", "string", NULL};
|
||||
+ static char *kwlist[] = {"name", "string", "usedforsecurity", NULL};
|
||||
PyObject *name_obj = NULL;
|
||||
PyObject *data_obj = NULL;
|
||||
+ int usedforsecurity = 1;
|
||||
Py_buffer view = { 0 };
|
||||
PyObject *ret_obj;
|
||||
char *name;
|
||||
const EVP_MD *digest;
|
||||
|
||||
- if (!PyArg_ParseTupleAndKeywords(args, kwdict, "O|O:new", kwlist,
|
||||
- &name_obj, &data_obj)) {
|
||||
+ if (!PyArg_ParseTupleAndKeywords(args, kwdict, "O|Oi:new", kwlist,
|
||||
+ &name_obj, &data_obj, &usedforsecurity)) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
@@ -439,7 +511,8 @@ EVP_new(PyObject *self, PyObject *args, PyObject *kwdict)
|
||||
|
||||
digest = EVP_get_digestbyname(name);
|
||||
|
||||
- ret_obj = EVPnew(name_obj, digest, NULL, (unsigned char*)view.buf, view.len);
|
||||
+ ret_obj = EVPnew(name_obj, digest, NULL, (unsigned char*)view.buf, view.len,
|
||||
+ usedforsecurity);
|
||||
|
||||
if (data_obj)
|
||||
PyBuffer_Release(&view);
|
||||
@@ -722,57 +795,114 @@ generate_hash_name_list(void)
|
||||
|
||||
|
||||
/*
|
||||
- * This macro generates constructor function definitions for specific
|
||||
- * hash algorithms. These constructors are much faster than calling
|
||||
- * the generic one passing it a python string and are noticably
|
||||
- * faster than calling a python new() wrapper. Thats important for
|
||||
+ * This macro and function generate a family of constructor function
|
||||
+ * definitions for specific hash algorithms. These constructors are much
|
||||
+ * faster than calling the generic one passing it a python string and are
|
||||
+ * noticeably faster than calling a python new() wrapper. That's important for
|
||||
* code that wants to make hashes of a bunch of small strings.
|
||||
*/
|
||||
#define GEN_CONSTRUCTOR(NAME) \
|
||||
static PyObject * \
|
||||
- EVP_new_ ## NAME (PyObject *self, PyObject *args) \
|
||||
+ EVP_new_ ## NAME (PyObject *self, PyObject *args, PyObject *kwdict) \
|
||||
{ \
|
||||
- PyObject *data_obj = NULL; \
|
||||
- Py_buffer view = { 0 }; \
|
||||
- PyObject *ret_obj; \
|
||||
- \
|
||||
- if (!PyArg_ParseTuple(args, "|O:" #NAME , &data_obj)) { \
|
||||
- return NULL; \
|
||||
- } \
|
||||
- \
|
||||
- if (data_obj) \
|
||||
- GET_BUFFER_VIEW_OR_ERROUT(data_obj, &view); \
|
||||
- \
|
||||
- ret_obj = EVPnew( \
|
||||
- CONST_ ## NAME ## _name_obj, \
|
||||
- NULL, \
|
||||
- CONST_new_ ## NAME ## _ctx_p, \
|
||||
- (unsigned char*)view.buf, \
|
||||
- view.len); \
|
||||
- \
|
||||
- if (data_obj) \
|
||||
- PyBuffer_Release(&view); \
|
||||
- return ret_obj; \
|
||||
+ return implement_specific_EVP_new(self, args, kwdict, \
|
||||
+ "|Oi:" #NAME, \
|
||||
+ &cached_info_ ## NAME ); \
|
||||
}
|
||||
|
||||
+static PyObject *
|
||||
+implement_specific_EVP_new(PyObject *self, PyObject *args, PyObject *kwdict,
|
||||
+ const char *format,
|
||||
+ EVPCachedInfo *cached_info)
|
||||
+{
|
||||
+ static char *kwlist[] = {"string", "usedforsecurity", NULL};
|
||||
+ PyObject *data_obj = NULL;
|
||||
+ Py_buffer view = { 0 };
|
||||
+ int usedforsecurity = 1;
|
||||
+ int idx;
|
||||
+ PyObject *ret_obj = NULL;
|
||||
+
|
||||
+ assert(cached_info);
|
||||
+
|
||||
+ if (!PyArg_ParseTupleAndKeywords(args, kwdict, format, kwlist,
|
||||
+ &data_obj, &usedforsecurity)) {
|
||||
+ return NULL;
|
||||
+ }
|
||||
+
|
||||
+ if (data_obj)
|
||||
+ GET_BUFFER_VIEW_OR_ERROUT(data_obj, &view);
|
||||
+
|
||||
+ idx = usedforsecurity ? 1 : 0;
|
||||
+
|
||||
+ /*
|
||||
+ * If an error occurred during creation of the global content, the ctx_ptr
|
||||
+ * will be NULL, and the error_msg will hopefully be non-NULL:
|
||||
+ */
|
||||
+ if (cached_info->ctx_ptrs[idx]) {
|
||||
+ /* We successfully initialized this context; copy it: */
|
||||
+ ret_obj = EVPnew(cached_info->name_obj,
|
||||
+ NULL,
|
||||
+ cached_info->ctx_ptrs[idx],
|
||||
+ (unsigned char*)view.buf, view.len,
|
||||
+ usedforsecurity);
|
||||
+ } else {
|
||||
+ /* Some kind of error happened initializing the global context for
|
||||
+ this (digest, usedforsecurity) pair.
|
||||
+ Raise an exception with the saved error message: */
|
||||
+ if (cached_info->error_msgs[idx]) {
|
||||
+ PyErr_SetObject(PyExc_ValueError, cached_info->error_msgs[idx]);
|
||||
+ } else {
|
||||
+ PyErr_SetString(PyExc_ValueError, "Error initializing hash");
|
||||
+ }
|
||||
+ }
|
||||
+
|
||||
+ if (data_obj)
|
||||
+ PyBuffer_Release(&view);
|
||||
+
|
||||
+ return ret_obj;
|
||||
+}
|
||||
+
|
||||
/* a PyMethodDef structure for the constructor */
|
||||
#define CONSTRUCTOR_METH_DEF(NAME) \
|
||||
- {"openssl_" #NAME, (PyCFunction)EVP_new_ ## NAME, METH_VARARGS, \
|
||||
+ {"openssl_" #NAME, (PyCFunction)EVP_new_ ## NAME, \
|
||||
+ METH_VARARGS|METH_KEYWORDS, \
|
||||
PyDoc_STR("Returns a " #NAME \
|
||||
" hash object; optionally initialized with a string") \
|
||||
}
|
||||
|
||||
-/* used in the init function to setup a constructor: initialize OpenSSL
|
||||
- constructor constants if they haven't been initialized already. */
|
||||
-#define INIT_CONSTRUCTOR_CONSTANTS(NAME) do { \
|
||||
- if (CONST_ ## NAME ## _name_obj == NULL) { \
|
||||
- CONST_ ## NAME ## _name_obj = PyUnicode_FromString(#NAME); \
|
||||
- if (EVP_get_digestbyname(#NAME)) { \
|
||||
- CONST_new_ ## NAME ## _ctx_p = &CONST_new_ ## NAME ## _ctx; \
|
||||
- EVP_DigestInit(CONST_new_ ## NAME ## _ctx_p, EVP_get_digestbyname(#NAME)); \
|
||||
- } \
|
||||
- } \
|
||||
+/*
|
||||
+ Macro/function pair to set up the constructors.
|
||||
+
|
||||
+ Try to initialize a context for each hash twice, once with
|
||||
+ EVP_MD_CTX_FLAG_NON_FIPS_ALLOW and once without.
|
||||
+
|
||||
+ Any that have errors during initialization will end up with a NULL ctx_ptrs
|
||||
+ entry, and error_msgs will be set (unless we're very low on memory).
|
||||
+*/
|
||||
+#define INIT_CONSTRUCTOR_CONSTANTS(NAME) do { \
|
||||
+ init_constructor_constant(&cached_info_ ## NAME, #NAME); \
|
||||
} while (0);
|
||||
+static void
|
||||
+init_constructor_constant(EVPCachedInfo *cached_info, const char *name)
|
||||
+{
|
||||
+ assert(cached_info);
|
||||
+ cached_info->name_obj = PyUnicode_FromString(name);
|
||||
+ if (EVP_get_digestbyname(name)) {
|
||||
+ int i;
|
||||
+ for (i=0; i<2; i++) {
|
||||
+ mc_ctx_init(&cached_info->ctxs[i], i);
|
||||
+ if (EVP_DigestInit_ex(&cached_info->ctxs[i],
|
||||
+ EVP_get_digestbyname(name), NULL)) {
|
||||
+ /* Success: */
|
||||
+ cached_info->ctx_ptrs[i] = &cached_info->ctxs[i];
|
||||
+ } else {
|
||||
+ /* Failure: */
|
||||
+ cached_info->ctx_ptrs[i] = NULL;
|
||||
+ cached_info->error_msgs[i] = error_msg_for_last_error();
|
||||
+ }
|
||||
+ }
|
||||
+ }
|
||||
+}
|
||||
|
||||
GEN_CONSTRUCTOR(md5)
|
||||
GEN_CONSTRUCTOR(sha1)
|
||||
@@ -819,13 +949,10 @@ PyInit__hashlib(void)
|
||||
{
|
||||
PyObject *m, *openssl_md_meth_names;
|
||||
|
||||
- OpenSSL_add_all_digests();
|
||||
- ERR_load_crypto_strings();
|
||||
+ SSL_load_error_strings();
|
||||
+ SSL_library_init();
|
||||
|
||||
- /* TODO build EVP_functions openssl_* entries dynamically based
|
||||
- * on what hashes are supported rather than listing many
|
||||
- * but having some be unsupported. Only init appropriate
|
||||
- * constants. */
|
||||
+ OpenSSL_add_all_digests();
|
||||
|
||||
Py_TYPE(&EVPtype) = &PyType_Type;
|
||||
if (PyType_Ready(&EVPtype) < 0)
|
|
@ -0,0 +1,15 @@
|
|||
diff -up Python-3.2.3/Lib/ctypes/__init__.py.rhbz814391 Python-3.2.3/Lib/ctypes/__init__.py
|
||||
--- Python-3.2.3/Lib/ctypes/__init__.py.rhbz814391 2012-04-20 15:12:49.017867692 -0400
|
||||
+++ Python-3.2.3/Lib/ctypes/__init__.py 2012-04-20 15:15:09.501111408 -0400
|
||||
@@ -275,11 +275,6 @@ def _reset_cache():
|
||||
# _SimpleCData.c_char_p_from_param
|
||||
POINTER(c_char).from_param = c_char_p.from_param
|
||||
_pointer_type_cache[None] = c_void_p
|
||||
- # XXX for whatever reasons, creating the first instance of a callback
|
||||
- # function is needed for the unittests on Win64 to succeed. This MAY
|
||||
- # be a compiler bug, since the problem occurs only when _ctypes is
|
||||
- # compiled with the MS SDK compiler. Or an uninitialized variable?
|
||||
- CFUNCTYPE(c_int)(lambda: None)
|
||||
|
||||
def create_unicode_buffer(init, size=None):
|
||||
"""create_unicode_buffer(aString) -> character array
|
|
@ -0,0 +1,68 @@
|
|||
diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py
|
||||
index e9fdb07..ea60e6e 100644
|
||||
--- a/Lib/test/test_os.py
|
||||
+++ b/Lib/test/test_os.py
|
||||
@@ -1723,30 +1723,36 @@ class PosixUidGidTests(unittest.TestCase):
|
||||
def test_setuid(self):
|
||||
if os.getuid() != 0:
|
||||
self.assertRaises(OSError, os.setuid, 0)
|
||||
+ self.assertRaises(TypeError, os.setuid, 'not an int')
|
||||
self.assertRaises(OverflowError, os.setuid, 1<<32)
|
||||
|
||||
@unittest.skipUnless(hasattr(os, 'setgid'), 'test needs os.setgid()')
|
||||
def test_setgid(self):
|
||||
if os.getuid() != 0 and not HAVE_WHEEL_GROUP:
|
||||
self.assertRaises(OSError, os.setgid, 0)
|
||||
+ self.assertRaises(TypeError, os.setgid, 'not an int')
|
||||
self.assertRaises(OverflowError, os.setgid, 1<<32)
|
||||
|
||||
@unittest.skipUnless(hasattr(os, 'seteuid'), 'test needs os.seteuid()')
|
||||
def test_seteuid(self):
|
||||
if os.getuid() != 0:
|
||||
self.assertRaises(OSError, os.seteuid, 0)
|
||||
+ self.assertRaises(TypeError, os.seteuid, 'not an int')
|
||||
self.assertRaises(OverflowError, os.seteuid, 1<<32)
|
||||
|
||||
@unittest.skipUnless(hasattr(os, 'setegid'), 'test needs os.setegid()')
|
||||
def test_setegid(self):
|
||||
if os.getuid() != 0 and not HAVE_WHEEL_GROUP:
|
||||
self.assertRaises(OSError, os.setegid, 0)
|
||||
+ self.assertRaises(TypeError, os.setegid, 'not an int')
|
||||
self.assertRaises(OverflowError, os.setegid, 1<<32)
|
||||
|
||||
@unittest.skipUnless(hasattr(os, 'setreuid'), 'test needs os.setreuid()')
|
||||
def test_setreuid(self):
|
||||
if os.getuid() != 0:
|
||||
self.assertRaises(OSError, os.setreuid, 0, 0)
|
||||
+ self.assertRaises(TypeError, os.setreuid, 'not an int', 0)
|
||||
+ self.assertRaises(TypeError, os.setreuid, 0, 'not an int')
|
||||
self.assertRaises(OverflowError, os.setreuid, 1<<32, 0)
|
||||
self.assertRaises(OverflowError, os.setreuid, 0, 1<<32)
|
||||
|
||||
@@ -1762,6 +1768,8 @@ class PosixUidGidTests(unittest.TestCase):
|
||||
def test_setregid(self):
|
||||
if os.getuid() != 0 and not HAVE_WHEEL_GROUP:
|
||||
self.assertRaises(OSError, os.setregid, 0, 0)
|
||||
+ self.assertRaises(TypeError, os.setregid, 'not an int', 0)
|
||||
+ self.assertRaises(TypeError, os.setregid, 0, 'not an int')
|
||||
self.assertRaises(OverflowError, os.setregid, 1<<32, 0)
|
||||
self.assertRaises(OverflowError, os.setregid, 0, 1<<32)
|
||||
|
||||
diff --git a/Lib/test/test_pwd.py b/Lib/test/test_pwd.py
|
||||
index ac9cff7..db98159 100644
|
||||
--- a/Lib/test/test_pwd.py
|
||||
+++ b/Lib/test/test_pwd.py
|
||||
@@ -104,11 +104,11 @@ class PwdTest(unittest.TestCase):
|
||||
# In some cases, byuids isn't a complete list of all users in the
|
||||
# system, so if we try to pick a value not in byuids (via a perturbing
|
||||
# loop, say), pwd.getpwuid() might still be able to find data for that
|
||||
- # uid. Using sys.maxint may provoke the same problems, but hopefully
|
||||
+ # uid. Using 2**32 - 2 may provoke the same problems, but hopefully
|
||||
# it will be a more repeatable failure.
|
||||
# Android accepts a very large span of uids including sys.maxsize and
|
||||
# -1; it raises KeyError with 1 or 2 for example.
|
||||
- fakeuid = sys.maxsize
|
||||
+ fakeuid = 2**32 - 2
|
||||
self.assertNotIn(fakeuid, byuids)
|
||||
if not support.is_android:
|
||||
self.assertRaises(KeyError, pwd.getpwuid, fakeuid)
|
|
@ -0,0 +1,11 @@
|
|||
diff -up cpython-59223da36dec/Lib/test/test_posix.py.disable-test_fs_holes-in-rpm-build cpython-59223da36dec/Lib/test/test_posix.py
|
||||
--- cpython-59223da36dec/Lib/test/test_posix.py.disable-test_fs_holes-in-rpm-build 2012-08-07 17:15:59.000000000 -0400
|
||||
+++ cpython-59223da36dec/Lib/test/test_posix.py 2012-08-07 17:16:53.528330330 -0400
|
||||
@@ -973,6 +973,7 @@ class PosixTester(unittest.TestCase):
|
||||
posix.RTLD_GLOBAL
|
||||
posix.RTLD_LOCAL
|
||||
|
||||
+ @unittest._skipInRpmBuild('running kernel may not match kernel in chroot')
|
||||
@unittest.skipUnless(hasattr(os, 'SEEK_HOLE'),
|
||||
"test needs an OS that reports file holes")
|
||||
def test_fs_holes(self):
|
|
@ -0,0 +1,11 @@
|
|||
diff -up Python-3.3.0b1/Lib/test/test_socket.py.disable-test_socket-in-rpm-builds Python-3.3.0b1/Lib/test/test_socket.py
|
||||
--- Python-3.3.0b1/Lib/test/test_socket.py.disable-test_socket-in-rpm-builds 2012-07-24 15:02:30.823355067 -0400
|
||||
+++ Python-3.3.0b1/Lib/test/test_socket.py 2012-07-24 15:08:13.021354999 -0400
|
||||
@@ -2188,6 +2188,7 @@ class RecvmsgGenericStreamTests(RecvmsgG
|
||||
# Tests which require a stream socket and can use either recvmsg()
|
||||
# or recvmsg_into().
|
||||
|
||||
+ @unittest._skipInRpmBuild('fails intermittently when run within Koji')
|
||||
def testRecvmsgEOF(self):
|
||||
# Receive end-of-stream indicator (b"", peer socket closed).
|
||||
msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock, 1024)
|
|
@ -0,0 +1,310 @@
|
|||
diff --git a/Include/object.h b/Include/object.h
|
||||
index 0c88603..e3413e8 100644
|
||||
--- a/Include/object.h
|
||||
+++ b/Include/object.h
|
||||
@@ -1059,6 +1059,49 @@ PyAPI_FUNC(void)
|
||||
_PyObject_DebugTypeStats(FILE *out);
|
||||
#endif /* ifndef Py_LIMITED_API */
|
||||
|
||||
+/*
|
||||
+ Define a pair of assertion macros.
|
||||
+
|
||||
+ These work like the regular C assert(), in that they will abort the
|
||||
+ process with a message on stderr if the given condition fails to hold,
|
||||
+ but compile away to nothing if NDEBUG is defined.
|
||||
+
|
||||
+ However, before aborting, Python will also try to call _PyObject_Dump() on
|
||||
+ the given object. This may be of use when investigating bugs in which a
|
||||
+ particular object is corrupt (e.g. a buggy tp_visit method in an extension
|
||||
+ module breaking the garbage collector), to help locate the broken objects.
|
||||
+
|
||||
+ The WITH_MSG variant allows you to supply an additional message that Python
|
||||
+ will attempt to print to stderr, after the object dump.
|
||||
+*/
|
||||
+#ifdef NDEBUG
|
||||
+/* No debugging: compile away the assertions: */
|
||||
+#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) ((void)0)
|
||||
+#else
|
||||
+/* With debugging: generate checks: */
|
||||
+#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) \
|
||||
+ ((expr) \
|
||||
+ ? (void)(0) \
|
||||
+ : _PyObject_AssertFailed((obj), \
|
||||
+ (msg), \
|
||||
+ (__STRING(expr)), \
|
||||
+ (__FILE__), \
|
||||
+ (__LINE__), \
|
||||
+ (__PRETTY_FUNCTION__)))
|
||||
+#endif
|
||||
+
|
||||
+#define PyObject_ASSERT(obj, expr) \
|
||||
+ PyObject_ASSERT_WITH_MSG(obj, expr, NULL)
|
||||
+
|
||||
+/*
|
||||
+ Declare and define the entrypoint even when NDEBUG is defined, to avoid
|
||||
+ causing compiler/linker errors when building extensions without NDEBUG
|
||||
+ against a Python built with NDEBUG defined
|
||||
+*/
|
||||
+PyAPI_FUNC(void) _PyObject_AssertFailed(PyObject *, const char *,
|
||||
+ const char *, const char *, int,
|
||||
+ const char *);
|
||||
+
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py
|
||||
index e727499..6efcafb 100644
|
||||
--- a/Lib/test/test_gc.py
|
||||
+++ b/Lib/test/test_gc.py
|
||||
@@ -1,10 +1,11 @@
|
||||
import unittest
|
||||
from test.support import (verbose, refcount_test, run_unittest,
|
||||
strip_python_stderr, cpython_only, start_threads,
|
||||
- temp_dir, requires_type_collecting)
|
||||
+ temp_dir, import_module, requires_type_collecting)
|
||||
from test.support.script_helper import assert_python_ok, make_script
|
||||
|
||||
import sys
|
||||
+import sysconfig
|
||||
import time
|
||||
import gc
|
||||
import weakref
|
||||
@@ -50,6 +51,8 @@ class GC_Detector(object):
|
||||
# gc collects it.
|
||||
self.wr = weakref.ref(C1055820(666), it_happened)
|
||||
|
||||
+BUILD_WITH_NDEBUG = ('-DNDEBUG' in sysconfig.get_config_vars()['PY_CFLAGS'])
|
||||
+
|
||||
@with_tp_del
|
||||
class Uncollectable(object):
|
||||
"""Create a reference cycle with multiple __del__ methods.
|
||||
@@ -862,6 +865,50 @@ class GCCallbackTests(unittest.TestCase):
|
||||
self.assertEqual(len(gc.garbage), 0)
|
||||
|
||||
|
||||
+ @unittest.skipIf(BUILD_WITH_NDEBUG,
|
||||
+ 'built with -DNDEBUG')
|
||||
+ def test_refcount_errors(self):
|
||||
+ self.preclean()
|
||||
+ # Verify the "handling" of objects with broken refcounts
|
||||
+ import_module("ctypes") #skip if not supported
|
||||
+
|
||||
+ import subprocess
|
||||
+ code = '''if 1:
|
||||
+ a = []
|
||||
+ b = [a]
|
||||
+
|
||||
+ # Simulate the refcount of "a" being too low (compared to the
|
||||
+ # references held on it by live data), but keeping it above zero
|
||||
+ # (to avoid deallocating it):
|
||||
+ import ctypes
|
||||
+ ctypes.pythonapi.Py_DecRef(ctypes.py_object(a))
|
||||
+
|
||||
+ # The garbage collector should now have a fatal error when it reaches
|
||||
+ # the broken object:
|
||||
+ import gc
|
||||
+ gc.collect()
|
||||
+ '''
|
||||
+ p = subprocess.Popen([sys.executable, "-c", code],
|
||||
+ stdout=subprocess.PIPE,
|
||||
+ stderr=subprocess.PIPE)
|
||||
+ stdout, stderr = p.communicate()
|
||||
+ p.stdout.close()
|
||||
+ p.stderr.close()
|
||||
+ # Verify that stderr has a useful error message:
|
||||
+ self.assertRegex(stderr,
|
||||
+ b'Modules/gcmodule.c:[0-9]+: visit_decref: Assertion "\(\(gc\)->gc.gc_refs >> \(1\)\) != 0" failed.')
|
||||
+ self.assertRegex(stderr,
|
||||
+ b'refcount was too small')
|
||||
+ self.assertRegex(stderr,
|
||||
+ b'object : \[\]')
|
||||
+ self.assertRegex(stderr,
|
||||
+ b'type : list')
|
||||
+ self.assertRegex(stderr,
|
||||
+ b'refcount: 1')
|
||||
+ self.assertRegex(stderr,
|
||||
+ b'address : 0x[0-9a-f]+')
|
||||
+
|
||||
+
|
||||
class GCTogglingTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
gc.enable()
|
||||
diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c
|
||||
index 0c6f444..87edd5a 100644
|
||||
--- a/Modules/gcmodule.c
|
||||
+++ b/Modules/gcmodule.c
|
||||
@@ -341,7 +341,8 @@ update_refs(PyGC_Head *containers)
|
||||
{
|
||||
PyGC_Head *gc = containers->gc.gc_next;
|
||||
for (; gc != containers; gc = gc->gc.gc_next) {
|
||||
- assert(_PyGCHead_REFS(gc) == GC_REACHABLE);
|
||||
+ PyObject_ASSERT(FROM_GC(gc),
|
||||
+ _PyGCHead_REFS(gc) == GC_REACHABLE);
|
||||
_PyGCHead_SET_REFS(gc, Py_REFCNT(FROM_GC(gc)));
|
||||
/* Python's cyclic gc should never see an incoming refcount
|
||||
* of 0: if something decref'ed to 0, it should have been
|
||||
@@ -361,7 +362,8 @@ update_refs(PyGC_Head *containers)
|
||||
* so serious that maybe this should be a release-build
|
||||
* check instead of an assert?
|
||||
*/
|
||||
- assert(_PyGCHead_REFS(gc) != 0);
|
||||
+ PyObject_ASSERT(FROM_GC(gc),
|
||||
+ _PyGCHead_REFS(gc) != 0);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -376,7 +378,9 @@ visit_decref(PyObject *op, void *data)
|
||||
* generation being collected, which can be recognized
|
||||
* because only they have positive gc_refs.
|
||||
*/
|
||||
- assert(_PyGCHead_REFS(gc) != 0); /* else refcount was too small */
|
||||
+ PyObject_ASSERT_WITH_MSG(FROM_GC(gc),
|
||||
+ _PyGCHead_REFS(gc) != 0,
|
||||
+ "refcount was too small"); /* else refcount was too small */
|
||||
if (_PyGCHead_REFS(gc) > 0)
|
||||
_PyGCHead_DECREF(gc);
|
||||
}
|
||||
@@ -436,9 +440,10 @@ visit_reachable(PyObject *op, PyGC_Head *reachable)
|
||||
* If gc_refs == GC_UNTRACKED, it must be ignored.
|
||||
*/
|
||||
else {
|
||||
- assert(gc_refs > 0
|
||||
- || gc_refs == GC_REACHABLE
|
||||
- || gc_refs == GC_UNTRACKED);
|
||||
+ PyObject_ASSERT(FROM_GC(gc),
|
||||
+ gc_refs > 0
|
||||
+ || gc_refs == GC_REACHABLE
|
||||
+ || gc_refs == GC_UNTRACKED);
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
@@ -480,7 +485,7 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable)
|
||||
*/
|
||||
PyObject *op = FROM_GC(gc);
|
||||
traverseproc traverse = Py_TYPE(op)->tp_traverse;
|
||||
- assert(_PyGCHead_REFS(gc) > 0);
|
||||
+ PyObject_ASSERT(op, _PyGCHead_REFS(gc) > 0);
|
||||
_PyGCHead_SET_REFS(gc, GC_REACHABLE);
|
||||
(void) traverse(op,
|
||||
(visitproc)visit_reachable,
|
||||
@@ -543,7 +548,7 @@ move_legacy_finalizers(PyGC_Head *unreachable, PyGC_Head *finalizers)
|
||||
for (gc = unreachable->gc.gc_next; gc != unreachable; gc = next) {
|
||||
PyObject *op = FROM_GC(gc);
|
||||
|
||||
- assert(IS_TENTATIVELY_UNREACHABLE(op));
|
||||
+ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op));
|
||||
next = gc->gc.gc_next;
|
||||
|
||||
if (has_legacy_finalizer(op)) {
|
||||
@@ -619,7 +624,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||
PyWeakReference **wrlist;
|
||||
|
||||
op = FROM_GC(gc);
|
||||
- assert(IS_TENTATIVELY_UNREACHABLE(op));
|
||||
+ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op));
|
||||
next = gc->gc.gc_next;
|
||||
|
||||
if (! PyType_SUPPORTS_WEAKREFS(Py_TYPE(op)))
|
||||
@@ -640,9 +645,9 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||
* the callback pointer intact. Obscure: it also
|
||||
* changes *wrlist.
|
||||
*/
|
||||
- assert(wr->wr_object == op);
|
||||
+ PyObject_ASSERT(wr->wr_object, wr->wr_object == op);
|
||||
_PyWeakref_ClearRef(wr);
|
||||
- assert(wr->wr_object == Py_None);
|
||||
+ PyObject_ASSERT(wr->wr_object, wr->wr_object == Py_None);
|
||||
if (wr->wr_callback == NULL)
|
||||
continue; /* no callback */
|
||||
|
||||
@@ -676,7 +681,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||
*/
|
||||
if (IS_TENTATIVELY_UNREACHABLE(wr))
|
||||
continue;
|
||||
- assert(IS_REACHABLE(wr));
|
||||
+ PyObject_ASSERT(op, IS_REACHABLE(wr));
|
||||
|
||||
/* Create a new reference so that wr can't go away
|
||||
* before we can process it again.
|
||||
@@ -685,7 +690,8 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||
|
||||
/* Move wr to wrcb_to_call, for the next pass. */
|
||||
wrasgc = AS_GC(wr);
|
||||
- assert(wrasgc != next); /* wrasgc is reachable, but
|
||||
+ PyObject_ASSERT(op, wrasgc != next);
|
||||
+ /* wrasgc is reachable, but
|
||||
next isn't, so they can't
|
||||
be the same */
|
||||
gc_list_move(wrasgc, &wrcb_to_call);
|
||||
@@ -701,11 +707,11 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old)
|
||||
|
||||
gc = wrcb_to_call.gc.gc_next;
|
||||
op = FROM_GC(gc);
|
||||
- assert(IS_REACHABLE(op));
|
||||
- assert(PyWeakref_Check(op));
|
||||
+ PyObject_ASSERT(op, IS_REACHABLE(op));
|
||||
+ PyObject_ASSERT(op, PyWeakref_Check(op));
|
||||
wr = (PyWeakReference *)op;
|
||||
callback = wr->wr_callback;
|
||||
- assert(callback != NULL);
|
||||
+ PyObject_ASSERT(op, callback != NULL);
|
||||
|
||||
/* copy-paste of weakrefobject.c's handle_callback() */
|
||||
temp = PyObject_CallFunctionObjArgs(callback, wr, NULL);
|
||||
@@ -822,12 +828,14 @@ check_garbage(PyGC_Head *collectable)
|
||||
for (gc = collectable->gc.gc_next; gc != collectable;
|
||||
gc = gc->gc.gc_next) {
|
||||
_PyGCHead_SET_REFS(gc, Py_REFCNT(FROM_GC(gc)));
|
||||
- assert(_PyGCHead_REFS(gc) != 0);
|
||||
+ PyObject_ASSERT(FROM_GC(gc),
|
||||
+ _PyGCHead_REFS(gc) != 0);
|
||||
}
|
||||
subtract_refs(collectable);
|
||||
for (gc = collectable->gc.gc_next; gc != collectable;
|
||||
gc = gc->gc.gc_next) {
|
||||
- assert(_PyGCHead_REFS(gc) >= 0);
|
||||
+ PyObject_ASSERT(FROM_GC(gc),
|
||||
+ _PyGCHead_REFS(gc) >= 0);
|
||||
if (_PyGCHead_REFS(gc) != 0)
|
||||
return -1;
|
||||
}
|
||||
diff --git a/Objects/object.c b/Objects/object.c
|
||||
index 559794f..a47d47f 100644
|
||||
--- a/Objects/object.c
|
||||
+++ b/Objects/object.c
|
||||
@@ -2022,6 +2022,35 @@ _PyTrash_thread_destroy_chain(void)
|
||||
}
|
||||
}
|
||||
|
||||
+PyAPI_FUNC(void)
|
||||
+_PyObject_AssertFailed(PyObject *obj, const char *msg, const char *expr,
|
||||
+ const char *file, int line, const char *function)
|
||||
+{
|
||||
+ fprintf(stderr,
|
||||
+ "%s:%d: %s: Assertion \"%s\" failed.\n",
|
||||
+ file, line, function, expr);
|
||||
+ if (msg) {
|
||||
+ fprintf(stderr, "%s\n", msg);
|
||||
+ }
|
||||
+
|
||||
+ fflush(stderr);
|
||||
+
|
||||
+ if (obj) {
|
||||
+ /* This might succeed or fail, but we're about to abort, so at least
|
||||
+ try to provide any extra info we can: */
|
||||
+ _PyObject_Dump(obj);
|
||||
+ }
|
||||
+ else {
|
||||
+ fprintf(stderr, "NULL object\n");
|
||||
+ }
|
||||
+
|
||||
+ fflush(stdout);
|
||||
+ fflush(stderr);
|
||||
+
|
||||
+ /* Terminate the process: */
|
||||
+ abort();
|
||||
+}
|
||||
+
|
||||
#ifndef Py_TRACE_REFS
|
||||
/* For Py_LIMITED_API, we need an out-of-line version of _Py_Dealloc.
|
||||
Define this here, so we can undefine the macro. */
|
|
@ -0,0 +1,30 @@
|
|||
diff -r 39b9b05c3085 Lib/distutils/sysconfig.py
|
||||
--- a/Lib/distutils/sysconfig.py Wed Apr 10 00:27:23 2013 +0200
|
||||
+++ b/Lib/distutils/sysconfig.py Wed Apr 10 10:14:18 2013 +0200
|
||||
@@ -362,7 +362,10 @@
|
||||
done[n] = item = ""
|
||||
if found:
|
||||
after = value[m.end():]
|
||||
- value = value[:m.start()] + item + after
|
||||
+ value = value[:m.start()]
|
||||
+ if item.strip() not in value:
|
||||
+ value += item
|
||||
+ value += after
|
||||
if "$" in after:
|
||||
notdone[name] = value
|
||||
else:
|
||||
diff -r 39b9b05c3085 Lib/sysconfig.py
|
||||
--- a/Lib/sysconfig.py Wed Apr 10 00:27:23 2013 +0200
|
||||
+++ b/Lib/sysconfig.py Wed Apr 10 10:14:18 2013 +0200
|
||||
@@ -296,7 +296,10 @@
|
||||
|
||||
if found:
|
||||
after = value[m.end():]
|
||||
- value = value[:m.start()] + item + after
|
||||
+ value = value[:m.start()]
|
||||
+ if item.strip() not in value:
|
||||
+ value += item
|
||||
+ value += after
|
||||
if "$" in after:
|
||||
notdone[name] = value
|
||||
else:
|
|
@ -0,0 +1,13 @@
|
|||
diff --git a/config.sub b/config.sub
|
||||
index 40ea5df..932128b 100755
|
||||
--- a/config.sub
|
||||
+++ b/config.sub
|
||||
@@ -1045,7 +1045,7 @@ case $basic_machine in
|
||||
;;
|
||||
ppc64) basic_machine=powerpc64-unknown
|
||||
;;
|
||||
- ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
|
||||
+ ppc64-* | ppc64p7-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
|
||||
;;
|
||||
ppc64le | powerpc64little)
|
||||
basic_machine=powerpc64le-unknown
|
|
@ -0,0 +1,14 @@
|
|||
diff -r 7fa3e824a4ee Lib/test/test_py_compile.py
|
||||
--- a/Lib/test/test_py_compile.py Tue Oct 29 22:25:06 2013 -0400
|
||||
+++ b/Lib/test/test_py_compile.py Wed Oct 30 11:08:31 2013 +0100
|
||||
@@ -54,6 +54,10 @@
|
||||
self.assertTrue(os.path.exists(self.pyc_path))
|
||||
self.assertFalse(os.path.exists(self.cache_path))
|
||||
|
||||
+ def test_bad_coding(self):
|
||||
+ bad_coding = os.path.join(os.path.dirname(__file__), 'bad_coding2.py')
|
||||
+ self.assertIsNone(py_compile.compile(bad_coding, doraise=False))
|
||||
+
|
||||
def test_relative_path(self):
|
||||
py_compile.compile(os.path.relpath(self.source_path),
|
||||
os.path.relpath(self.pyc_path))
|
|
@ -0,0 +1,11 @@
|
|||
diff -r 28c04e954bb6 Lib/lib2to3/main.py
|
||||
--- a/Lib/lib2to3/main.py Tue Oct 29 22:25:55 2013 -0400
|
||||
+++ b/Lib/lib2to3/main.py Wed Nov 06 14:33:07 2013 +0100
|
||||
@@ -213,6 +213,7 @@
|
||||
|
||||
# Set up logging handler
|
||||
level = logging.DEBUG if options.verbose else logging.INFO
|
||||
+ logging.root.handlers = []
|
||||
logging.basicConfig(format='%(name)s: %(message)s', level=level)
|
||||
logger = logging.getLogger('lib2to3.main')
|
||||
|
|
@ -0,0 +1,233 @@
|
|||
diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py
|
||||
index d69e09f..5cb12df 100644
|
||||
--- a/Lib/ensurepip/__init__.py
|
||||
+++ b/Lib/ensurepip/__init__.py
|
||||
@@ -1,8 +1,10 @@
|
||||
import os
|
||||
import os.path
|
||||
import pkgutil
|
||||
+import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
+from ensurepip import rewheel
|
||||
|
||||
|
||||
__all__ = ["version", "bootstrap"]
|
||||
@@ -25,6 +27,8 @@ def _run_pip(args, additional_paths=None):
|
||||
|
||||
# Install the bundled software
|
||||
import pip
|
||||
+ if args[0] in ["install", "list", "wheel"]:
|
||||
+ args.append('--pre')
|
||||
return pip.main(args)
|
||||
|
||||
|
||||
@@ -88,20 +92,39 @@ def _bootstrap(*, root=None, upgrade=False, user=False,
|
||||
# omit pip and easy_install
|
||||
os.environ["ENSUREPIP_OPTIONS"] = "install"
|
||||
|
||||
+ whls = []
|
||||
+ rewheel_dir = None
|
||||
+ # try to see if we have system-wide versions of _PROJECTS
|
||||
+ dep_records = rewheel.find_system_records([p[0] for p in _PROJECTS])
|
||||
+ # TODO: check if system-wide versions are the newest ones
|
||||
+ # if --upgrade is used?
|
||||
+ if all(dep_records):
|
||||
+ # if we have all _PROJECTS installed system-wide, we'll recreate
|
||||
+ # wheels from them and install those
|
||||
+ rewheel_dir = tempfile.TemporaryDirectory()
|
||||
+ for dr in dep_records:
|
||||
+ new_whl = rewheel.rewheel_from_record(dr, rewheel_dir.name)
|
||||
+ whls.append(os.path.join(rewheel_dir.name, new_whl))
|
||||
+ else:
|
||||
+ # if we don't have all the _PROJECTS installed system-wide,
|
||||
+ # let's just fall back to bundled wheels
|
||||
+ for project, version in _PROJECTS:
|
||||
+ whl = os.path.join(
|
||||
+ os.path.dirname(__file__),
|
||||
+ "_bundled",
|
||||
+ "{}-{}-py2.py3-none-any.whl".format(project, version)
|
||||
+ )
|
||||
+ whls.append(whl)
|
||||
+
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
# Put our bundled wheels into a temporary directory and construct the
|
||||
# additional paths that need added to sys.path
|
||||
additional_paths = []
|
||||
- for project, version in _PROJECTS:
|
||||
- wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version)
|
||||
- whl = pkgutil.get_data(
|
||||
- "ensurepip",
|
||||
- "_bundled/{}".format(wheel_name),
|
||||
- )
|
||||
- with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
|
||||
- fp.write(whl)
|
||||
-
|
||||
- additional_paths.append(os.path.join(tmpdir, wheel_name))
|
||||
+ for whl in whls:
|
||||
+ shutil.copy(whl, tmpdir)
|
||||
+ additional_paths.append(os.path.join(tmpdir, os.path.basename(whl)))
|
||||
+ if rewheel_dir:
|
||||
+ rewheel_dir.cleanup()
|
||||
|
||||
# Construct the arguments to be passed to the pip command
|
||||
args = ["install", "--no-index", "--find-links", tmpdir]
|
||||
diff -Nur Python-3.4.1/Lib/ensurepip/rewheel/__init__.py Python-3.4.1-rewheel/Lib/ensurepip/rewheel/__init__.py
|
||||
--- Python-3.4.1/Lib/ensurepip/rewheel/__init__.py 1970-01-01 01:00:00.000000000 +0100
|
||||
+++ Python-3.4.1-rewheel/Lib/ensurepip/rewheel/__init__.py 2014-08-21 10:11:22.560320121 +0200
|
||||
@@ -0,0 +1,143 @@
|
||||
+import argparse
|
||||
+import codecs
|
||||
+import csv
|
||||
+import email.parser
|
||||
+import os
|
||||
+import io
|
||||
+import re
|
||||
+import site
|
||||
+import subprocess
|
||||
+import sys
|
||||
+import zipfile
|
||||
+
|
||||
+def run():
|
||||
+ parser = argparse.ArgumentParser(description='Recreate wheel of package with given RECORD.')
|
||||
+ parser.add_argument('record_path',
|
||||
+ help='Path to RECORD file')
|
||||
+ parser.add_argument('-o', '--output-dir',
|
||||
+ help='Dir where to place the wheel, defaults to current working dir.',
|
||||
+ dest='outdir',
|
||||
+ default=os.path.curdir)
|
||||
+
|
||||
+ ns = parser.parse_args()
|
||||
+ retcode = 0
|
||||
+ try:
|
||||
+ print(rewheel_from_record(**vars(ns)))
|
||||
+ except BaseException as e:
|
||||
+ print('Failed: {}'.format(e))
|
||||
+ retcode = 1
|
||||
+ sys.exit(1)
|
||||
+
|
||||
+def find_system_records(projects):
|
||||
+ """Return list of paths to RECORD files for system-installed projects.
|
||||
+
|
||||
+ If a project is not installed, the resulting list contains None instead
|
||||
+ of a path to its RECORD
|
||||
+ """
|
||||
+ records = []
|
||||
+ # get system site-packages dirs
|
||||
+ sys_sitepack = site.getsitepackages([sys.base_prefix, sys.base_exec_prefix])
|
||||
+ sys_sitepack = [sp for sp in sys_sitepack if os.path.exists(sp)]
|
||||
+ # try to find all projects in all system site-packages
|
||||
+ for project in projects:
|
||||
+ path = None
|
||||
+ for sp in sys_sitepack:
|
||||
+ dist_info_re = os.path.join(sp, project) + r'-[^\{0}]+\.dist-info'.format(os.sep)
|
||||
+ candidates = [os.path.join(sp, p) for p in os.listdir(sp)]
|
||||
+ # filter out candidate dirs based on the above regexp
|
||||
+ filtered = [c for c in candidates if re.match(dist_info_re, c)]
|
||||
+ # if we have 0 or 2 or more dirs, something is wrong...
|
||||
+ if len(filtered) == 1:
|
||||
+ path = filtered[0]
|
||||
+ if path is not None:
|
||||
+ records.append(os.path.join(path, 'RECORD'))
|
||||
+ else:
|
||||
+ records.append(None)
|
||||
+ return records
|
||||
+
|
||||
+def rewheel_from_record(record_path, outdir):
|
||||
+ """Recreates a wheel of the package with the given record_path and returns the path
|
||||
+ to the newly created wheel."""
|
||||
+ site_dir = os.path.dirname(os.path.dirname(record_path))
|
||||
+ record_relpath = record_path[len(site_dir):].strip(os.path.sep)
|
||||
+ to_write, to_omit = get_records_to_pack(site_dir, record_relpath)
|
||||
+ new_wheel_name = get_wheel_name(record_path)
|
||||
+ new_wheel_path = os.path.join(outdir, new_wheel_name + '.whl')
|
||||
+
|
||||
+ new_wheel = zipfile.ZipFile(new_wheel_path, mode='w', compression=zipfile.ZIP_DEFLATED)
|
||||
+ # we need to write a new record with just the files that we will write,
|
||||
+ # e.g. not binaries and *.pyc/*.pyo files
|
||||
+ new_record = io.StringIO()
|
||||
+ writer = csv.writer(new_record)
|
||||
+
|
||||
+ # handle files that we can write straight away
|
||||
+ for f, sha_hash, size in to_write:
|
||||
+ new_wheel.write(os.path.join(site_dir, f), arcname=f)
|
||||
+ writer.writerow([f, sha_hash,size])
|
||||
+
|
||||
+ # rewrite the old wheel file with a new computed one
|
||||
+ writer.writerow([record_relpath, '', ''])
|
||||
+ new_wheel.writestr(record_relpath, new_record.getvalue())
|
||||
+
|
||||
+ new_wheel.close()
|
||||
+
|
||||
+ return new_wheel.filename
|
||||
+
|
||||
+def get_wheel_name(record_path):
|
||||
+ """Return proper name of the wheel, without .whl."""
|
||||
+
|
||||
+ wheel_info_path = os.path.join(os.path.dirname(record_path), 'WHEEL')
|
||||
+ with codecs.open(wheel_info_path, encoding='utf-8') as wheel_info_file:
|
||||
+ wheel_info = email.parser.Parser().parsestr(wheel_info_file.read())
|
||||
+
|
||||
+ metadata_path = os.path.join(os.path.dirname(record_path), 'METADATA')
|
||||
+ with codecs.open(metadata_path, encoding='utf-8') as metadata_file:
|
||||
+ metadata = email.parser.Parser().parsestr(metadata_file.read())
|
||||
+
|
||||
+ # construct name parts according to wheel spec
|
||||
+ distribution = metadata.get('Name')
|
||||
+ version = metadata.get('Version')
|
||||
+ build_tag = '' # nothing for now
|
||||
+ lang_tag = []
|
||||
+ for t in wheel_info.get_all('Tag'):
|
||||
+ lang_tag.append(t.split('-')[0])
|
||||
+ lang_tag = '.'.join(lang_tag)
|
||||
+ abi_tag, plat_tag = wheel_info.get('Tag').split('-')[1:3]
|
||||
+ # leave out build tag, if it is empty
|
||||
+ to_join = filter(None, [distribution, version, build_tag, lang_tag, abi_tag, plat_tag])
|
||||
+ return '-'.join(list(to_join))
|
||||
+
|
||||
+def get_records_to_pack(site_dir, record_relpath):
|
||||
+ """Accepts path of sitedir and path of RECORD file relative to it.
|
||||
+ Returns two lists:
|
||||
+ - list of files that can be written to new RECORD straight away
|
||||
+ - list of files that shouldn't be written or need some processing
|
||||
+ (pyc and pyo files, scripts)
|
||||
+ """
|
||||
+ record_file_path = os.path.join(site_dir, record_relpath)
|
||||
+ with codecs.open(record_file_path, encoding='utf-8') as record_file:
|
||||
+ record_contents = record_file.read()
|
||||
+ # temporary fix for https://github.com/pypa/pip/issues/1376
|
||||
+ # we need to ignore files under ".data" directory
|
||||
+ data_dir = os.path.dirname(record_relpath).strip(os.path.sep)
|
||||
+ data_dir = data_dir[:-len('dist-info')] + 'data'
|
||||
+
|
||||
+ to_write = []
|
||||
+ to_omit = []
|
||||
+ for l in record_contents.splitlines():
|
||||
+ spl = l.split(',')
|
||||
+ if len(spl) == 3:
|
||||
+ # new record will omit (or write differently):
|
||||
+ # - abs paths, paths with ".." (entry points),
|
||||
+ # - pyc+pyo files
|
||||
+ # - the old RECORD file
|
||||
+ # TODO: is there any better way to recognize an entry point?
|
||||
+ if os.path.isabs(spl[0]) or spl[0].startswith('..') or \
|
||||
+ spl[0].endswith('.pyc') or spl[0].endswith('.pyo') or \
|
||||
+ spl[0] == record_relpath or spl[0].startswith(data_dir):
|
||||
+ to_omit.append(spl)
|
||||
+ else:
|
||||
+ to_write.append(spl)
|
||||
+ else:
|
||||
+ pass # bad RECORD or empty line
|
||||
+ return to_write, to_omit
|
||||
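As an illustrative sketch of the rewheel helpers defined above (the project names mirror ensurepip's bundled projects; the output directory is an assumption):

    from ensurepip import rewheel

    # RECORD paths for system-installed projects; entries are None for any
    # project that is not installed system-wide.
    records = rewheel.find_system_records(['setuptools', 'pip'])
    for record in records:
        if record is not None:
            # Rebuild a wheel from the installed files and report its path.
            print(rewheel.rewheel_from_record(record, '/tmp'))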
diff -Nur Python-3.4.1/Makefile.pre.in Python-3.4.1-rewheel/Makefile.pre.in
|
||||
--- Python-3.4.1/Makefile.pre.in 2014-08-21 10:49:31.512695040 +0200
|
||||
+++ Python-3.4.1-rewheel/Makefile.pre.in 2014-08-21 10:10:41.961341722 +0200
|
||||
@@ -1145,7 +1145,7 @@
|
||||
test/test_asyncio \
|
||||
collections concurrent concurrent/futures encodings \
|
||||
email email/mime test/test_email test/test_email/data \
|
||||
- ensurepip ensurepip/_bundled \
|
||||
+ ensurepip ensurepip/_bundled ensurepip/rewheel \
|
||||
html json test/test_json http dbm xmlrpc \
|
||||
sqlite3 sqlite3/test \
|
||||
logging csv wsgiref urllib \
|
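With the rewheel-aware bootstrap above in place, the whole path can be exercised roughly like this (the alternate root is an assumption for this sketch):

    import ensurepip

    # Prefers wheels rebuilt from system-wide pip/setuptools when available,
    # otherwise falls back to the bundled wheels.
    ensurepip.bootstrap(root='/tmp/ensurepip-root', upgrade=True)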
|
@ -0,0 +1,12 @@
|
|||
diff -up Python-3.5.0/Makefile.pre.in.lib Python-3.5.0/Makefile.pre.in
|
||||
--- Python-3.5.0/Makefile.pre.in.lib 2015-09-21 15:39:47.928286620 +0200
|
||||
+++ Python-3.5.0/Makefile.pre.in 2015-09-21 15:42:58.004042762 +0200
|
||||
@@ -1340,7 +1340,7 @@ inclinstall:
|
||||
|
||||
# Install the library and miscellaneous stuff needed for extending/embedding
|
||||
# This goes into $(exec_prefix)
|
||||
-LIBPL= @LIBPL@
|
||||
+LIBPL= $(LIBDEST)/config-$(LDVERSION)-$(MULTIARCH)
|
||||
|
||||
# pkgconfig directory
|
||||
LIBPC= $(LIBDIR)/pkgconfig
|
|
@ -0,0 +1,15 @@
|
|||
diff -up Python-3.5.0/configure.ac.eabi Python-3.5.0/configure.ac
|
||||
--- Python-3.5.0/configure.eabi 2015-09-23 13:52:20.756909744 +0200
|
||||
+++ Python-3.5.0/configure 2015-09-23 13:52:46.859163629 +0200
|
||||
@@ -762,9 +762,9 @@ cat >> conftest.c <<EOF
|
||||
alpha-linux-gnu
|
||||
# elif defined(__ARM_EABI__) && defined(__ARM_PCS_VFP)
|
||||
# if defined(__ARMEL__)
|
||||
- arm-linux-gnueabihf
|
||||
+ arm-linux-gnueabi
|
||||
# else
|
||||
- armeb-linux-gnueabihf
|
||||
+ armeb-linux-gnueabi
|
||||
# endif
|
||||
# elif defined(__ARM_EABI__) && !defined(__ARM_PCS_VFP)
|
||||
# if defined(__ARMEL__)
|
|
@ -0,0 +1,42 @@
|
|||
diff -urp Python-3.5.0/configure p/configure
|
||||
--- Python-3.5.0/configure 2016-02-25 16:12:12.615184011 +0000
|
||||
+++ p/configure 2016-02-25 16:13:01.293412517 +0000
|
||||
@@ -5133,7 +5133,7 @@ cat >> conftest.c <<EOF
|
||||
# elif _MIPS_SIM == _ABIN32
|
||||
mips64el-linux-gnuabin32
|
||||
# elif _MIPS_SIM == _ABI64
|
||||
- mips64el-linux-gnuabi64
|
||||
+ mips64el-linux-gnu
|
||||
# else
|
||||
# error unknown platform triplet
|
||||
# endif
|
||||
@@ -5143,7 +5143,7 @@ cat >> conftest.c <<EOF
|
||||
# elif _MIPS_SIM == _ABIN32
|
||||
mips64-linux-gnuabin32
|
||||
# elif _MIPS_SIM == _ABI64
|
||||
- mips64-linux-gnuabi64
|
||||
+ mips64-linux-gnu
|
||||
# else
|
||||
# error unknown platform triplet
|
||||
# endif
|
||||
diff -urp Python-3.5.0/configure.ac p/configure.ac
|
||||
--- Python-3.5.0/configure.ac 2016-02-25 16:12:11.663159985 +0000
|
||||
+++ p/configure.ac 2016-02-25 16:13:18.814854710 +0000
|
||||
@@ -784,7 +784,7 @@ cat >> conftest.c <<EOF
|
||||
# elif _MIPS_SIM == _ABIN32
|
||||
mips64el-linux-gnuabin32
|
||||
# elif _MIPS_SIM == _ABI64
|
||||
- mips64el-linux-gnuabi64
|
||||
+ mips64el-linux-gnu
|
||||
# else
|
||||
# error unknown platform triplet
|
||||
# endif
|
||||
@@ -794,7 +794,7 @@ cat >> conftest.c <<EOF
|
||||
# elif _MIPS_SIM == _ABIN32
|
||||
mips64-linux-gnuabin32
|
||||
# elif _MIPS_SIM == _ABI64
|
||||
- mips64-linux-gnuabi64
|
||||
+ mips64-linux-gnu
|
||||
# else
|
||||
# error unknown platform triplet
|
||||
# endif
|
File diff suppressed because it is too large
|
@ -0,0 +1,106 @@
|
|||
diff --git a/Doc/whatsnew/3.6.rst b/Doc/whatsnew/3.6.rst
|
||||
index 847b50140a6..570dc3ed6fe 100644
|
||||
--- a/Doc/whatsnew/3.6.rst
|
||||
+++ b/Doc/whatsnew/3.6.rst
|
||||
@@ -1852,10 +1852,10 @@ Build and C API Changes
|
||||
* The :c:func:`PyUnicode_FSConverter` and :c:func:`PyUnicode_FSDecoder`
|
||||
functions will now accept :term:`path-like objects <path-like object>`.
|
||||
|
||||
-* The ``PyExc_RecursionErrorInst`` singleton that was part of the public API
|
||||
- has been removed as its members being never cleared may cause a segfault
|
||||
- during finalization of the interpreter. Contributed by Xavier de Gaye in
|
||||
- :issue:`22898` and :issue:`30697`.
|
||||
+* The ``PyExc_RecursionErrorInst`` singleton is not used anymore as its members
|
||||
+ being never cleared may cause a segfault during finalization of the
|
||||
+ interpreter. Contributed by Xavier de Gaye in :issue:`22898` and
|
||||
+ :issue:`30697`.
|
||||
|
||||
|
||||
Other Improvements
|
||||
diff --git a/Include/pyerrors.h b/Include/pyerrors.h
|
||||
index c28c1373f82..8c1dbc5047b 100644
|
||||
--- a/Include/pyerrors.h
|
||||
+++ b/Include/pyerrors.h
|
||||
@@ -219,6 +219,8 @@ PyAPI_DATA(PyObject *) PyExc_IOError;
|
||||
PyAPI_DATA(PyObject *) PyExc_WindowsError;
|
||||
#endif
|
||||
|
||||
+PyAPI_DATA(PyObject *) PyExc_RecursionErrorInst;
|
||||
+
|
||||
/* Predefined warning categories */
|
||||
PyAPI_DATA(PyObject *) PyExc_Warning;
|
||||
PyAPI_DATA(PyObject *) PyExc_UserWarning;
|
||||
diff --git a/Misc/NEWS.d/next/C API/2017-12-20-15-23-06.bpo-30697.v9FmgG.rst b/Misc/NEWS.d/next/C API/2017-12-20-15-23-06.bpo-30697.v9FmgG.rst
|
||||
new file mode 100644
|
||||
index 00000000000..28f74ad4f30
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/C API/2017-12-20-15-23-06.bpo-30697.v9FmgG.rst
|
||||
@@ -0,0 +1 @@
|
||||
+Restore PyExc_RecursionErrorInst in 3.6
|
||||
diff --git a/Objects/exceptions.c b/Objects/exceptions.c
|
||||
index df4899372a5..271e293e325 100644
|
||||
--- a/Objects/exceptions.c
|
||||
+++ b/Objects/exceptions.c
|
||||
@@ -2430,6 +2430,12 @@ SimpleExtendsException(PyExc_Warning, ResourceWarning,
|
||||
|
||||
|
||||
|
||||
+/* Pre-computed RecursionError instance for when recursion depth is reached.
|
||||
+ Meant to be used when normalizing the exception for exceeding the recursion
|
||||
+ depth will cause its own infinite recursion.
|
||||
+*/
|
||||
+PyObject *PyExc_RecursionErrorInst = NULL;
|
||||
+
|
||||
#define PRE_INIT(TYPE) \
|
||||
if (!(_PyExc_ ## TYPE.tp_flags & Py_TPFLAGS_READY)) { \
|
||||
if (PyType_Ready(&_PyExc_ ## TYPE) < 0) \
|
||||
@@ -2691,11 +2697,37 @@ _PyExc_Init(PyObject *bltinmod)
|
||||
ADD_ERRNO(TimeoutError, ETIMEDOUT);
|
||||
|
||||
preallocate_memerrors();
|
||||
+
|
||||
+ if (!PyExc_RecursionErrorInst) {
|
||||
+ PyExc_RecursionErrorInst = BaseException_new(&_PyExc_RecursionError, NULL, NULL);
|
||||
+ if (!PyExc_RecursionErrorInst)
|
||||
+ Py_FatalError("Cannot pre-allocate RecursionError instance for "
|
||||
+ "recursion errors");
|
||||
+ else {
|
||||
+ PyBaseExceptionObject *err_inst =
|
||||
+ (PyBaseExceptionObject *)PyExc_RecursionErrorInst;
|
||||
+ PyObject *args_tuple;
|
||||
+ PyObject *exc_message;
|
||||
+ exc_message = PyUnicode_FromString("maximum recursion depth exceeded");
|
||||
+ if (!exc_message)
|
||||
+ Py_FatalError("cannot allocate argument for RecursionError "
|
||||
+ "pre-allocation");
|
||||
+ args_tuple = PyTuple_Pack(1, exc_message);
|
||||
+ if (!args_tuple)
|
||||
+ Py_FatalError("cannot allocate tuple for RecursionError "
|
||||
+ "pre-allocation");
|
||||
+ Py_DECREF(exc_message);
|
||||
+ if (BaseException_init(err_inst, args_tuple, NULL))
|
||||
+ Py_FatalError("init of pre-allocated RecursionError failed");
|
||||
+ Py_DECREF(args_tuple);
|
||||
+ }
|
||||
+ }
|
||||
}
|
||||
|
||||
void
|
||||
_PyExc_Fini(void)
|
||||
{
|
||||
+ Py_CLEAR(PyExc_RecursionErrorInst);
|
||||
free_preallocated_memerrors();
|
||||
Py_CLEAR(errnomap);
|
||||
}
|
||||
diff --git a/PC/python3.def b/PC/python3.def
|
||||
index 4fc4a6814ee..ff70718fc37 100644
|
||||
--- a/PC/python3.def
|
||||
+++ b/PC/python3.def
|
||||
@@ -224,6 +224,7 @@ EXPORTS
|
||||
PyExc_PermissionError=python36.PyExc_PermissionError DATA
|
||||
PyExc_ProcessLookupError=python36.PyExc_ProcessLookupError DATA
|
||||
PyExc_RecursionError=python36.PyExc_RecursionError DATA
|
||||
+ PyExc_RecursionErrorInst=python36.PyExc_RecursionErrorInst DATA
|
||||
PyExc_ReferenceError=python36.PyExc_ReferenceError DATA
|
||||
PyExc_ResourceWarning=python36.PyExc_ResourceWarning DATA
|
||||
PyExc_RuntimeError=python36.PyExc_RuntimeError DATA
|
|
@ -0,0 +1,104 @@
|
|||
From 5affd5c29eb1493cb31ef3cfdde15538ac134689 Mon Sep 17 00:00:00 2001
|
||||
From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= <miro@hroncok.cz>
|
||||
Date: Tue, 13 Mar 2018 10:56:43 +0100
|
||||
Subject: [PATCH] bpo-32885: Tools/scripts/pathfix.py: Add -n option for no
|
||||
backup~ (#5772)
|
||||
|
||||
Creating backup files with ~ suffix can be undesirable in some environment,
|
||||
such as when building RPM packages. Instead of requiring the user to remove
|
||||
those files manually, option -n was added, that simply disables this feature.
|
||||
|
||||
-n was selected because 2to3 has the same option with this behavior.
|
||||
---
|
||||
Misc/ACKS | 1 +
|
||||
.../2018-02-20-12-16-47.bpo-32885.dL5x7C.rst | 2 ++
|
||||
Tools/scripts/pathfix.py | 28 +++++++++++++++-------
|
||||
3 files changed, 23 insertions(+), 8 deletions(-)
|
||||
create mode 100644 Misc/NEWS.d/next/Tools-Demos/2018-02-20-12-16-47.bpo-32885.dL5x7C.rst
|
||||
|
||||
diff --git a/Misc/ACKS b/Misc/ACKS
|
||||
index d8179c8b03ab..d752d8a35434 100644
|
||||
--- a/Misc/ACKS
|
||||
+++ b/Misc/ACKS
|
||||
@@ -687,6 +687,7 @@ Ken Howard
|
||||
Brad Howes
|
||||
Mike Hoy
|
||||
Ben Hoyt
|
||||
+Miro Hrončok
|
||||
Chiu-Hsiang Hsu
|
||||
Chih-Hao Huang
|
||||
Christian Hudon
|
||||
diff --git a/Misc/NEWS.d/next/Tools-Demos/2018-02-20-12-16-47.bpo-32885.dL5x7C.rst b/Misc/NEWS.d/next/Tools-Demos/2018-02-20-12-16-47.bpo-32885.dL5x7C.rst
|
||||
new file mode 100644
|
||||
index 000000000000..e003e1d84fd0
|
||||
--- /dev/null
|
||||
+++ b/Misc/NEWS.d/next/Tools-Demos/2018-02-20-12-16-47.bpo-32885.dL5x7C.rst
|
||||
@@ -0,0 +1,2 @@
|
||||
+Add an ``-n`` flag for ``Tools/scripts/pathfix.py`` to disable automatic
+backup creation (files with ``~`` suffix).
diff --git a/Tools/scripts/pathfix.py b/Tools/scripts/pathfix.py
index 562bbc737812..c5bf984306a3 100755
--- a/Tools/scripts/pathfix.py
+++ b/Tools/scripts/pathfix.py
@@ -7,8 +7,9 @@
# Directories are searched recursively for files whose name looks
# like a python module.
# Symbolic links are always ignored (except as explicit directory
-# arguments). Of course, the original file is kept as a back-up
-# (with a "~" attached to its name).
+# arguments).
+# The original file is kept as a back-up (with a "~" attached to its name),
+# -n flag can be used to disable this.
#
# Undoubtedly you can do this using find and sed or perl, but this is
# a nice example of Python code that recurses down a directory tree
@@ -31,14 +32,17 @@

new_interpreter = None
preserve_timestamps = False
+create_backup = True
+

def main():
global new_interpreter
global preserve_timestamps
- usage = ('usage: %s -i /interpreter -p file-or-directory ...\n' %
+ global create_backup
+ usage = ('usage: %s -i /interpreter -p -n file-or-directory ...\n' %
sys.argv[0])
try:
- opts, args = getopt.getopt(sys.argv[1:], 'i:p')
+ opts, args = getopt.getopt(sys.argv[1:], 'i:pn')
except getopt.error as msg:
err(str(msg) + '\n')
err(usage)
@@ -48,6 +52,8 @@ def main():
new_interpreter = a.encode()
if o == '-p':
preserve_timestamps = True
+ if o == '-n':
+ create_backup = False
if not new_interpreter or not new_interpreter.startswith(b'/') or \
not args:
err('-i option or file-or-directory missing\n')
@@ -134,10 +140,16 @@ def fix(filename):
except OSError as msg:
err('%s: warning: chmod failed (%r)\n' % (tempname, msg))
# Then make a backup of the original file as filename~
- try:
- os.rename(filename, filename + '~')
- except OSError as msg:
- err('%s: warning: backup failed (%r)\n' % (filename, msg))
+ if create_backup:
+ try:
+ os.rename(filename, filename + '~')
+ except OSError as msg:
+ err('%s: warning: backup failed (%r)\n' % (filename, msg))
+ else:
+ try:
+ os.remove(filename)
+ except OSError as msg:
+ err('%s: warning: removing failed (%r)\n' % (filename, msg))
# Now move the temp file to the original file
try:
os.rename(tempname, filename)
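A minimal sketch of the backup-or-remove step that the patched fix() performs, assuming a create_backup flag toggled by -n (plain Python; the helper name and error reporting are illustrative, not the patch itself):

    # Sketch of the behaviour added above: keep "filename~" by default,
    # or (with -n) drop the original before moving the rewritten file in.
    import os

    def replace_original(tempname, filename, create_backup=True):
        if create_backup:
            try:
                os.rename(filename, filename + '~')      # keep a ~ backup
            except OSError as msg:
                print('%s: warning: backup failed (%r)' % (filename, msg))
        else:
            try:
                os.remove(filename)                      # -n: no backup
            except OSError as msg:
                print('%s: warning: removing failed (%r)' % (filename, msg))
        os.rename(tempname, filename)                    # install the fixed file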
@ -0,0 +1,19 @@
diff -up Python-3.1.1/Lib/distutils/unixccompiler.py.rpath Python-3.1.1/Lib/distutils/unixccompiler.py
--- Python-3.1.1/Lib/distutils/unixccompiler.py.rpath 2009-09-04 17:29:34.000000000 -0400
+++ Python-3.1.1/Lib/distutils/unixccompiler.py 2009-09-04 17:49:54.000000000 -0400
@@ -141,6 +141,15 @@ class UnixCCompiler(CCompiler):
if sys.platform == "cygwin":
exe_extension = ".exe"

+ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
+ """Remove standard library path from rpath"""
+ libraries, library_dirs, runtime_library_dirs = super()._fix_lib_args(
+ libraries, library_dirs, runtime_library_dirs)
+ libdir = sysconfig.get_config_var('LIBDIR')
+ if runtime_library_dirs and (libdir in runtime_library_dirs):
+ runtime_library_dirs.remove(libdir)
+ return libraries, library_dirs, runtime_library_dirs
+
def preprocess(self, source, output_file=None, macros=None,
include_dirs=None, extra_preargs=None, extra_postargs=None):
fixed_args = self._fix_compile_args(None, macros, include_dirs)
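The override above strips the interpreter's own LIBDIR from the runtime library dirs, so extensions built against the system Python do not embed a redundant rpath. A standalone sketch of the same filtering idea (the input list is made up; this is not the distutils code itself):

    # Sketch of the rpath filtering idea from the patch above.
    import sysconfig

    def strip_stdlib_rpath(runtime_library_dirs):
        libdir = sysconfig.get_config_var('LIBDIR')      # e.g. /usr/lib64
        return [d for d in runtime_library_dirs if d != libdir]

    print(strip_stdlib_rpath(['/opt/mylibs', sysconfig.get_config_var('LIBDIR')]))
    # expected: ['/opt/mylibs']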
@ -0,0 +1,50 @@
"""Checks if all *.pyc and *.pyo files have later mtime than their *.py files."""

import imp
import os
import sys

# list of test and other files that we expect not to have bytecode
not_compiled = [
    'test/bad_coding.py',
    'test/bad_coding2.py',
    'test/badsyntax_3131.py',
    'test/badsyntax_future3.py',
    'test/badsyntax_future4.py',
    'test/badsyntax_future5.py',
    'test/badsyntax_future6.py',
    'test/badsyntax_future7.py',
    'test/badsyntax_future8.py',
    'test/badsyntax_future9.py',
    'test/badsyntax_future10.py',
    'test/badsyntax_pep3120.py',
    'lib2to3/tests/data/bom.py',
    'lib2to3/tests/data/crlf.py',
    'lib2to3/tests/data/different_encoding.py',
    'lib2to3/tests/data/false_encoding.py',
    'lib2to3/tests/data/py2_test_grammar.py',
    '.debug-gdb.py',
]
failed = 0

def bytecode_expected(source):
    for f in not_compiled:
        if source.endswith(f):
            return False
    return True

compiled = filter(lambda f: bytecode_expected(f), sys.argv[1:])
for f in compiled:
    # check both pyo and pyc
    to_check = map(lambda b: imp.cache_from_source(f, b), (True, False))
    f_mtime = os.path.getmtime(f)
    for c in to_check:
        c_mtime = os.path.getmtime(c)
        if c_mtime < f_mtime:
            sys.stderr.write('Failed bytecompilation timestamps check: ')
            sys.stderr.write('Bytecode file {} is older than source file {}.\n'.format(c, f))
            failed += 1

if failed:
    sys.stderr.write('\n{} files failed bytecompilation timestamps check.\n'.format(failed))
    sys.exit(1)
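The check above relies on imp.cache_from_source() to map each source file to its cached bytecode path; the *.py paths to verify arrive as command-line arguments. A small sketch of that mapping (the path is made up; imp is deprecated but still available on 3.6):

    # Sketch: where the script expects to find bytecode for a given source file.
    # debug_override=True gives the plain .pyc path, False the optimized variant
    # (.pyo on old Pythons, .opt-1.pyc on 3.5+). The source path is made up.
    import imp

    src = 'Lib/os.py'
    print(imp.cache_from_source(src, True))
    print(imp.cache_from_source(src, False))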
@ -0,0 +1,35 @@
<?xml version="1.0" encoding="UTF-8"?>

<!-- Copyright 2017 Zbigniew Jędrzejewski-Szmek -->
<application>
  <id type="desktop">idle3.desktop</id>
  <name>IDLE3</name>
  <metadata_licence>CC0</metadata_licence>
  <project_license>Python-2.0</project_license>
  <summary>Python 3 Integrated Development and Learning Environment</summary>
  <description>
    <p>
      IDLE is Python’s Integrated Development and Learning Environment.
      The GUI is uniform between Windows, Unix, and Mac OS X.
      IDLE provides an easy way to start writing, running, and debugging
      Python code.
    </p>
    <p>
      IDLE is written in pure Python, and uses the tkinter GUI toolkit.
      It provides:
    </p>
    <ul>
      <li>a Python shell window (interactive interpreter) with colorizing of code input, output, and error messages,</li>
      <li>a multi-window text editor with multiple undo, Python colorizing, smart indent, call tips, auto completion, and other features,</li>
      <li>search within any window, replace within editor windows, and search through multiple files (grep),</li>
      <li>a debugger with persistent breakpoints, stepping, and viewing of global and local namespaces.</li>
    </ul>
  </description>
  <url type="homepage">https://docs.python.org/3/library/idle.html</url>
  <screenshots>
    <screenshot type="default">http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-main-window.png</screenshot>
    <screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-class-browser.png</screenshot>
    <screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-code-viewer.png</screenshot>
  </screenshots>
  <update_contact>zbyszek@in.waw.pl</update_contact>
</application>
@ -0,0 +1,11 @@
[Desktop Entry]
Version=1.0
Name=IDLE 3
Comment=Python 3 Integrated Development and Learning Environment
Exec=idle3 %F
TryExec=idle3
Terminal=false
Type=Application
Icon=idle3
Categories=Development;IDE;
MimeType=text/x-python;
@ -0,0 +1,17 @@
/* Systemtap tapset to make it easier to trace Python */

/*
   Define python.function.entry/return:
*/
probe python.function.entry = process("python").library("LIBRARY_PATH").mark("function__entry")
{
    filename = user_string($arg1);
    funcname = user_string($arg2);
    lineno = $arg3;
}
probe python.function.return = process("python").library("LIBRARY_PATH").mark("function__return")
{
    filename = user_string($arg1);
    funcname = user_string($arg2);
    lineno = $arg3;
}
@ -0,0 +1,25 @@
# Note that the path could itself be a python file, or a directory

# Python's compile_all module only works on directories, and requires a max
# recursion depth

# Note that the py_byte_compile macro should work for python2 as well
# Which unfortunately makes the definition more complicated than it should be
# The condition should be reversed once /usr/bin/python is python3!

%py_byte_compile()\
py2_byte_compile () {\
python_binary="%1"\
bytecode_compilation_path="%2"\
find $bytecode_compilation_path -type f -a -name "*.py" -print0 | xargs -0 $python_binary -c 'import py_compile, sys; [py_compile.compile(f, dfile=f.partition("$RPM_BUILD_ROOT")[2]) for f in sys.argv[1:]]' || :\
find $bytecode_compilation_path -type f -a -name "*.py" -print0 | xargs -0 $python_binary -O -c 'import py_compile, sys; [py_compile.compile(f, dfile=f.partition("$RPM_BUILD_ROOT")[2]) for f in sys.argv[1:]]' || :\
}\
\
py3_byte_compile () {\
python_binary="%1"\
bytecode_compilation_path="%2"\
find $bytecode_compilation_path -type f -a -name "*.py" -print0 | xargs -0 $python_binary -O -c 'import py_compile, sys; [py_compile.compile(f, dfile=f.partition("$RPM_BUILD_ROOT")[2], optimize=opt) for opt in range(2) for f in sys.argv[1:]]' || :\
}\
\
[[ "%1" == *python3* ]] || py2_byte_compile "%1" "%2" && py3_byte_compile "%1" "%2" \
%{nil}
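Inside the xargs one-liners the macro simply calls py_compile.compile() with dfile set to the path minus the build root, so tracebacks in the installed package point at the final location. A self-contained sketch of that call using a throwaway file (the buildroot here is a temp directory, purely illustrative):

    # Sketch of the py_compile call embedded in %py_byte_compile above.
    import os
    import py_compile
    import tempfile

    buildroot = tempfile.mkdtemp()                 # stands in for $RPM_BUILD_ROOT
    source = os.path.join(buildroot, 'example.py')
    with open(source, 'w') as f:
        f.write('print("hello")\n')

    # dfile records the path without the buildroot; optimize selects .pyc / .opt-1.pyc
    for opt in range(2):
        py_compile.compile(source, dfile=source.partition(buildroot)[2], optimize=opt)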
@ -0,0 +1,4 @@
%system_python_abi %{expand: \
%global __requires_exclude ^python\\\\(abi\\\\) = 3\\\\..$
Requires: system-python(abi) = %{python3_version}
}
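Once RPM resolves the escaping, __requires_exclude is an ordinary regex that drops the auto-generated python(abi) dependency so the explicit system-python(abi) requirement can stand in for it. A rough check of what the pattern matches (the sample dependency strings are made up):

    # Roughly the de-escaped exclusion pattern; sample strings are illustrative.
    import re

    pattern = re.compile(r'^python\(abi\) = 3\..$')
    for dep in ('python(abi) = 3.6', 'python(abi) = 3.10', 'libc.so.6()(64bit)'):
        print(dep, '->', 'excluded' if pattern.match(dep) else 'kept')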
@ -0,0 +1,21 @@
#!/usr/bin/stap

global fn_calls;

probe python.function.entry
{
    fn_calls[pid(), filename, funcname, lineno] += 1;
}

probe timer.ms(1000) {
    printf("\033[2J\033[1;1H") /* clear screen */
    printf("%6s %80s %6s %30s %6s\n",
           "PID", "FILENAME", "LINE", "FUNCTION", "CALLS")
    foreach ([pid, filename, funcname, lineno] in fn_calls- limit 20) {
        printf("%6d %80s %6d %30s %6d\n",
               pid, filename, lineno, funcname,
               fn_calls[pid, filename, funcname, lineno]);
    }

    delete fn_calls;
}
@ -0,0 +1,30 @@
diff -up Python-3.5.0/configure.ac.than Python-3.5.0/configure.ac
--- Python-3.5.0/configure.ac.than 2015-11-13 11:51:32.039560172 -0500
+++ Python-3.5.0/configure.ac 2015-11-13 11:52:11.670168157 -0500
@@ -804,9 +804,9 @@ cat >> conftest.c <<EOF
powerpc-linux-gnuspe
# elif defined(__powerpc64__)
# if defined(__LITTLE_ENDIAN__)
- powerpc64le-linux-gnu
+ ppc64le-linux-gnu
# else
- powerpc64-linux-gnu
+ ppc64-linux-gnu
# endif
# elif defined(__powerpc__)
powerpc-linux-gnu
diff -up Python-3.5.0/configure.than Python-3.5.0/configure
--- Python-3.5.0/configure.than 2015-11-13 12:13:19.039658399 -0500
+++ Python-3.5.0/configure 2015-11-13 12:13:35.199906857 -0500
@@ -5153,9 +5153,9 @@ cat >> conftest.c <<EOF
powerpc-linux-gnuspe
# elif defined(__powerpc64__)
# if defined(__LITTLE_ENDIAN__)
- powerpc64le-linux-gnu
+ ppc64le-linux-gnu
# else
- powerpc64-linux-gnu
+ ppc64-linux-gnu
# endif
# elif defined(__powerpc__)
powerpc-linux-gnu
@ -0,0 +1,19 @@
/*
Example usage of the Python systemtap tapset to show a nested view of all
Python function calls (and returns) across the whole system.

Run this using
    stap systemtap-example.stp
to instrument all Python processes on the system, or (for example) using
    stap systemtap-example.stp -c COMMAND
to instrument a specific program (implemented in Python)
*/
probe python.function.entry
{
    printf("%s => %s in %s:%d\n", thread_indent(1), funcname, filename, lineno);
}

probe python.function.return
{
    printf("%s <= %s in %s:%d\n", thread_indent(-1), funcname, filename, lineno);
}
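Any Python program can stand in for COMMAND above, provided the interpreter was built with the SystemTap markers the tapset probes attach to. A throwaway target one might trace, saved for example as fib.py (the script name and invocation are illustrative):

    # Hypothetical trace target: run under the example with
    #   stap systemtap-example.stp -c "python3 fib.py"
    def fib(n):
        if n < 2:
            return n
        return fib(n - 1) + fib(n - 2)

    if __name__ == '__main__':
        print(fib(10))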
File diff suppressed because it is too large