basebuilder_pel7ppc64lebuilder0, 4 years ago
75 changed files with 6169 additions and 1357 deletions
@@ -0,0 +1,38 @@

diff -up Python-2.7.3/Lib/pydoc.py.no_gui Python-2.7.3/Lib/pydoc.py
--- Python-2.7.3/Lib/pydoc.py.no_gui 2012-04-09 19:07:31.000000000 -0400
+++ Python-2.7.3/Lib/pydoc.py 2013-02-19 13:48:44.480054515 -0500
@@ -19,9 +19,6 @@ of all available modules.
Run "pydoc -p <port>" to start an HTTP server on a given port on the
local machine to generate documentation web pages.

-For platforms without a command line, "pydoc -g" starts the HTTP server
-and also pops up a little window for controlling it.
-
Run "pydoc -w <name>" to write out the HTML documentation for a module
to a file named "<name>.html".

@@ -2290,9 +2287,6 @@ def cli():
writing = 0

for opt, val in opts:
- if opt == '-g':
- gui()
- return
if opt == '-k':
apropos(val)
return
@@ -2346,13 +2340,10 @@ def cli():
%s -p <port>
Start an HTTP server on the given port on the local machine.

-%s -g
- Pop up a graphical interface for finding and serving documentation.
-
%s -w <name> ...
Write out the HTML documentation for a module to a file in the current
directory. If <name> contains a '%s', it is treated as a filename; if
it names a directory, documentation is written for all the contents.
-""" % (cmd, os.sep, cmd, cmd, cmd, cmd, os.sep)
+""" % (cmd, os.sep, cmd, cmd, cmd, os.sep)

if __name__ == '__main__': cli()
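With the "-g" GUI mode removed, the HTTP-server mode described above remains the documented way to browse the docs; as an illustrative sketch (the port 8080 is an arbitrary choice, not taken from the patch):

    # Sketch: serve pydoc over HTTP, equivalent to running "pydoc -p 8080"
    import subprocess
    import sys

    subprocess.call([sys.executable, '-m', 'pydoc', '-p', '8080'])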
@@ -1,822 +1,198 @@

diff -up Python-3.3.0rc2/configure.ac.systemtap Python-3.3.0rc2/configure.ac
--- Python-3.3.0rc2/configure.ac.systemtap 2012-09-09 05:11:14.000000000 -0400
+++ Python-3.3.0rc2/configure.ac 2012-09-10 09:17:21.114511781 -0400
@@ -2678,6 +2678,23 @@ if test "$with_valgrind" != no; then
OPT="-DDYNAMIC_ANNOTATIONS_ENABLED=1 $OPT"
diff -up Python-2.7rc1/configure.ac.systemtap Python-2.7rc1/configure.ac
--- Python-2.7rc1/configure.ac.systemtap 2010-06-06 10:53:15.514975012 -0400
+++ Python-2.7rc1/configure.ac 2010-06-06 10:53:15.520974361 -0400
@@ -2616,6 +2616,38 @@ if test "$with_valgrind" != no; then
)
fi

+# Check for systemtap support
+# On Linux, /usr/bin/dtrace is in fact a shim to SystemTap
+AC_MSG_CHECKING([for --with-systemtap])
+AC_ARG_WITH([systemtap],
+ AC_HELP_STRING([--with(out)-systemtap], [disable/enable SystemTap support]),,
+ with_systemtap=no)
+AC_MSG_RESULT([$with_systemtap])
+if test "$with_systemtap" != no; then
+ AC_DEFINE(WITH_SYSTEMTAP, 1,
+ [Define if you want to compile in SystemTap support])
+ SYSTEMTAPOBJS="Python/pysystemtap.o"
+ SYSTEMTAPDEPS="\$(srcdir)/Python/pysystemtap.h"
+fi
+
+AC_SUBST(SYSTEMTAPOBJS)
+AC_SUBST(SYSTEMTAPDEPS)
+
# -I${DLINCLDIR} is added to the compile rule for importdl.o
AC_SUBST(DLINCLDIR)
DLINCLDIR=.
diff -up Python-3.3.0rc2/configure.systemtap Python-3.3.0rc2/configure
--- Python-3.3.0rc2/configure.systemtap 2012-09-09 05:11:14.000000000 -0400
+++ Python-3.3.0rc2/configure 2012-09-10 09:17:21.116511780 -0400
@@ -618,6 +618,8 @@ TRUE
MACHDEP_OBJS
DYNLOADFILE
DLINCLDIR
+SYSTEMTAPDEPS
+SYSTEMTAPOBJS
THREADOBJ
LDLAST
USE_THREAD_MODULE
@@ -779,6 +781,7 @@ with_doc_strings
with_tsc
with_pymalloc
with_valgrind
+with_systemtap
with_fpectl
with_libm
with_libc
@@ -1456,6 +1459,7 @@ Optional Packages:
--with(out)-tsc enable/disable timestamp counter profile
--with(out)-pymalloc disable/enable specialized mallocs
--with-valgrind Enable Valgrind support
+ --with(out)-systemtap disable/enable SystemTap support
--with-fpectl enable SIGFPE catching
--with-libm=STRING math library
--with-libc=STRING C library
@@ -10065,6 +10069,31 @@ fi
OPT="-DDYNAMIC_ANNOTATIONS_ENABLED=1 $OPT"
fi

+# Check for systemtap support
+# On Linux, /usr/bin/dtrace is in fact a shim to SystemTap
+{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for --with-systemtap" >&5
+$as_echo_n "checking for --with-systemtap... " >&6; }
+
+# Check whether --with-systemtap was given.
+if test "${with_systemtap+set}" = set; then :
+ withval=$with_systemtap;
+# Check for dtrace support
+AC_MSG_CHECKING(for --with-dtrace)
+AC_ARG_WITH(dtrace,
+ AC_HELP_STRING(--with(out)-dtrace, disable/enable dtrace support))
+
+if test ! -z "$with_dtrace"
+then
+ if dtrace -G -o /dev/null -s $srcdir/Include/pydtrace.d 2>/dev/null
+ then
+ AC_DEFINE(WITH_DTRACE, 1,
+ [Define if you want to compile in Dtrace support])
+ with_dtrace="Sun"
+ DTRACEOBJS="Python/dtrace.o"
+ DTRADEHDRS=""
+ elif dtrace -h -o /dev/null -s $srcdir/Include/pydtrace.d
+ then
+ AC_DEFINE(WITH_DTRACE, 1,
+ [Define if you want to compile in Dtrace support])
+ with_dtrace="Apple"
+ DTRACEOBJS=""
+ DTRADEHDRS="pydtrace.h"
+ else
+ with_dtrace="no"
+ fi
+else
+ with_systemtap=no
+fi
+
+{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_systemtap" >&5
+$as_echo "$with_systemtap" >&6; }
+if test "$with_systemtap" != no; then
+
+$as_echo "#define WITH_SYSTEMTAP 1" >>confdefs.h
+
+ SYSTEMTAPOBJS="Python/pysystemtap.o"
+ SYSTEMTAPDEPS="\$(srcdir)/Python/pysystemtap.h"
+ with_dtrace="no"
+fi
+
+AC_MSG_RESULT($with_dtrace)
+AC_SUBST(DTRACEOBJS)
+AC_SUBST(DTRACEHDRS)
+
# Check for --with-wctype-functions
AC_MSG_CHECKING(for --with-wctype-functions)
AC_ARG_WITH(wctype-functions,
diff -up Python-2.7rc1/Include/pydtrace.d.systemtap Python-2.7rc1/Include/pydtrace.d
--- Python-2.7rc1/Include/pydtrace.d.systemtap 2010-06-06 10:53:15.520974361 -0400
+++ Python-2.7rc1/Include/pydtrace.d 2010-06-06 10:53:15.520974361 -0400
@@ -0,0 +1,10 @@
+provider python {
+ probe function__entry(const char *, const char *, int);
+ probe function__return(const char *, const char *, int);
+};
+
+
+
# -I${DLINCLDIR} is added to the compile rule for importdl.o

DLINCLDIR=.
diff -up Python-3.3.0rc2/Doc/howto/index.rst.systemtap Python-3.3.0rc2/Doc/howto/index.rst
--- Python-3.3.0rc2/Doc/howto/index.rst.systemtap 2012-09-09 05:10:51.000000000 -0400
+++ Python-3.3.0rc2/Doc/howto/index.rst 2012-09-10 09:17:21.117511779 -0400
@@ -29,4 +29,5 @@ Currently, the HOWTOs are:
argparse.rst
ipaddress.rst
clinic.rst
+ instrumentation.rst

diff -up Python-3.3.0rc2/Doc/howto/instrumentation.rst.systemtap Python-3.3.0rc2/Doc/howto/instrumentation.rst
--- Python-3.3.0rc2/Doc/howto/instrumentation.rst.systemtap 2012-09-10 09:17:21.117511779 -0400
+++ Python-3.3.0rc2/Doc/howto/instrumentation.rst 2012-09-10 09:17:21.117511779 -0400
@@ -0,0 +1,295 @@
+.. _instrumentation:
+
+====================================
+Instrumenting CPython with SystemTap
+====================================
+
+:author: David Malcolm <dmalcolm@redhat.com>
+
+DTrace and SystemTap are monitoring tools, each providing a way to inspect
+what the processes on a computer system are doing. They both use
+domain-specific languages allowing a user to write scripts which:
+
+ - filter which processes are to be observed
+ - gather data from the processes of interest
+ - generate reports on the data
+
+As of Python 3.3, CPython can be built with embedded "markers" that can be
+observed by a SystemTap script, making it easier to monitor what the CPython
+processes on a system are doing.
+
+.. Potentially this document could be expanded to also cover DTrace markers.
+ However, I'm not a DTrace expert.
+
+.. I'm using ".. code-block:: c" for SystemTap scripts, as "c" is syntactically
+ the closest match that Sphinx supports
+
+
+Enabling the static markers
+---------------------------
+
+In order to build CPython with the embedded markers for SystemTap, the
+SystemTap development tools must be installed.
+
+On a Fedora or Red Hat Enterprise Linux machine, this can be done via::
+
+ yum install systemtap-sdt-devel
+
+CPython must then be configured `--with-systemtap`::
+
+ checking for --with-systemtap... yes
+
+You can verify if the SystemTap static markers are present in the built
+binary by seeing if it contains a ".note.stapsdt" section.
+
+.. code-block:: bash
+
+ $ eu-readelf -S ./python | grep .note.stapsdt
+ [29] .note.stapsdt NOTE 0000000000000000 00308d78 000000b8 0 0 0 4
+
+If you've built python as a shared library (with --enable-shared), you need
+to look instead within the shared library. For example:
+
+.. code-block:: bash
+
+ $ eu-readelf -S libpython3.3dm.so.1.0 | grep .note.stapsdt
+ [28] .note.stapsdt NOTE 0000000000000000 00365b68 000000b8 0 0 0 4
+
+Earlier versions of SystemTap stored the markers in a ".probes" section.
+
+For the curious, you can see the metadata for the static markers using this
+invocation.
+
+.. code-block:: bash
+
+ $ eu-readelf -x .note.stapsdt ./python
+
+ Hex dump of section [29] '.note.stapsdt', 184 bytes at offset 0x308d78:
+ 0x00000000 08000000 45000000 03000000 73746170 ....E.......stap
+ 0x00000010 73647400 d4664b00 00000000 4fc36600 sdt..fK.....O.f.
+ 0x00000020 00000000 488d9000 00000000 70797468 ....H.......pyth
+ 0x00000030 6f6e0066 756e6374 696f6e5f 5f656e74 on.function__ent
+ 0x00000040 72790038 40257261 78203840 25726478 ry.8@%rax 8@%rdx
+ 0x00000050 202d3440 25656378 00000000 08000000 -4@%ecx........
+ 0x00000060 46000000 03000000 73746170 73647400 F.......stapsdt.
+ 0x00000070 0d674b00 00000000 4fc36600 00000000 .gK.....O.f.....
+ 0x00000080 4a8d9000 00000000 70797468 6f6e0066 J.......python.f
+ 0x00000090 756e6374 696f6e5f 5f726574 75726e00 unction__return.
+ 0x000000a0 38402572 61782038 40257264 78202d34 8@%rax 8@%rdx -4
+ 0x000000b0 40256563 78000000 @%ecx...
+
+and a sufficiently modern eu-readelf can print the metadata:
+
+.. code-block:: bash
+
+ $ eu-readelf -n ./python
+
+ Note section [ 1] '.note.gnu.build-id' of 36 bytes at offset 0x190:
+ Owner Data size Type
+ GNU 20 GNU_BUILD_ID
+ Build ID: a28f8db1b224530b0d38ad7b82a249cf7c3f18d6
+
+ Note section [27] '.note.stapsdt' of 184 bytes at offset 0x1ae884:
+ Owner Data size Type
+ stapsdt 70 Version: 3
+ PC: 0xe0d3a, Base: 0x14b150, Semaphore: 0x3ae882
+ Provider: python, Name: function__return, Args: '8@%rbx 8@%r13 -4@%eax'
+ stapsdt 69 Version: 3
+ PC: 0xe0f37, Base: 0x14b150, Semaphore: 0x3ae880
+ Provider: python, Name: function__entry, Args: '8@%rbx 8@%r13 -4@%eax'
+
+The above metadata contains information for SystemTap describing how it can
+patch strategically-placed machine code instructions to enable the tracing
+hooks used by a SystemTap script.
+
+
+Static markers
+--------------
+
+The low-level way to use the SystemTap integration is to use the static
+markers directly. This requires you to explicitly state the binary file
+containing them.
+
+For example, this script can be used to show the call/return hierarchy of a
+Python script:
+
+.. code-block:: c
+
+ probe process('python').mark("function__entry") {
+ filename = user_string($arg1);
+ funcname = user_string($arg2);
+ lineno = $arg3;
+
+ printf("%s => %s in %s:%d\\n",
+ thread_indent(1), funcname, filename, lineno);
+ }
+
+ probe process('python').mark("function__return") {
+ filename = user_string($arg1);
+ funcname = user_string($arg2);
+ lineno = $arg3;
+
+ printf("%s <= %s in %s:%d\\n",
+ thread_indent(-1), funcname, filename, lineno);
+ }
+
+It can be invoked like this:
+
+.. code-block:: bash
+
+ $ stap \
+ show-call-hierarchy.stp \
+ -c ./python test.py
+
+The output looks like this::
+
+ 11408 python(8274): => __contains__ in Lib/_abcoll.py:362
+ 11414 python(8274): => __getitem__ in Lib/os.py:425
+ 11418 python(8274): => encode in Lib/os.py:490
+ 11424 python(8274): <= encode in Lib/os.py:493
+ 11428 python(8274): <= __getitem__ in Lib/os.py:426
+ 11433 python(8274): <= __contains__ in Lib/_abcoll.py:366
+
+where the columns are:
+
+ - time in microseconds since start of script
+
+ - name of executable
+
+ - PID of process
+
+and the remainder indicates the call/return hierarchy as the script executes.
+
+For a `--enable-shared` build of CPython, the markers are contained within the
+libpython shared library, and the probe's dotted path needs to reflect this. For
+example, this line from the above example::
+
+ probe process('python').mark("function__entry") {
+
+should instead read::
+
+ probe process('python').library("libpython3.3dm.so.1.0").mark("function__entry") {
+
+(assuming a debug build of CPython 3.3)
+
+.. I'm reusing the "c:function" type for markers
+
+.. c:function:: function__entry(str filename, str funcname, int lineno)
+
+ This marker indicates that execution of a Python function has begun. It is
+ only triggered for pure-python (bytecode) functions.
+
+ The filename, function name, and line number are provided back to the
+ tracing script as positional arguments, which must be accessed using
+ `$arg1`, `$arg2`:
+
+ * `$arg1` : `(const char *)` filename, accessible using `user_string($arg1)`
+
+ * `$arg2` : `(const char *)` function name, accessible using
+ `user_string($arg2)`
+
+ * `$arg3` : `int` line number
+
+ * `$arg4` : `(PyFrameObject *)`, the frame being executed
+
+.. c:function:: function__return(str filename, str funcname, int lineno)
+
+ This marker is the converse of `function__entry`, and indicates that
+ execution of a Python function has ended (either via ``return``, or via an
+ exception). It is only triggered for pure-python (bytecode) functions.
+
+ The arguments are the same as for `function__entry`
+
+
+Tapsets
+-------
+
+The higher-level way to use the SystemTap integration is to use a "tapset":
+SystemTap's equivalent of a library, which hides some of the lower-level
+details of the static markers.
+
+Here is a tapset file, based on a non-shared build of CPython:
+
+.. code-block:: c
+
+ /*
+ Provide a higher-level wrapping around the function__entry and
+ function__return markers:
+ */
+ probe python.function.entry = process("python").mark("function__entry")
+ {
+ filename = user_string($arg1);
+ funcname = user_string($arg2);
+ lineno = $arg3;
+ frameptr = $arg4
+ }
+ probe python.function.return = process("python").mark("function__return")
+ {
+ filename = user_string($arg1);
+ funcname = user_string($arg2);
+ lineno = $arg3;
+ frameptr = $arg4
+ }
+
+If this file is installed in SystemTap's tapset directory (e.g.
+`/usr/share/systemtap/tapset`), then these additional probepoints become
+available:
+
+.. c:function:: python.function.entry(str filename, str funcname, int lineno, frameptr)
+
+ This probe point indicates that execution of a Python function has begun.
+ It is only triggered for pure-python (bytecode) functions.
+
+.. c:function:: python.function.return(str filename, str funcname, int lineno, frameptr)
+
+ This probe point is the converse of `python.function.entry`, and indicates
+ that execution of a Python function has ended (either via ``return``, or
+ via an exception). It is only triggered for pure-python (bytecode) functions.
+
+
+Examples
+--------
+This SystemTap script uses the tapset above to more cleanly implement the
+example given above of tracing the Python function-call hierarchy, without
+needing to directly name the static markers:
+
+.. code-block:: c
+
+ probe python.function.entry
+ {
+ printf("%s => %s in %s:%d\n",
+ thread_indent(1), funcname, filename, lineno);
+ }
+
+ probe python.function.return
+ {
+ printf("%s <= %s in %s:%d\n",
+ thread_indent(-1), funcname, filename, lineno);
+ }
+
+
+The following script uses the tapset above to provide a top-like view of all
+running CPython code, showing the top 20 most frequently-entered bytecode
+frames, each second, across the whole system:
+
+.. code-block:: c
+
+ global fn_calls;
+
+ probe python.function.entry
+ {
+ fn_calls[pid(), filename, funcname, lineno] += 1;
+ }
+
+ probe timer.ms(1000) {
+ printf("\033[2J\033[1;1H") /* clear screen */
+ printf("%6s %80s %6s %30s %6s\n",
+ "PID", "FILENAME", "LINE", "FUNCTION", "CALLS")
+ foreach ([pid, filename, funcname, lineno] in fn_calls- limit 20) {
+ printf("%6d %80s %6d %30s %6d\n",
+ pid, filename, lineno, funcname,
+ fn_calls[pid, filename, funcname, lineno]);
+ }
+ delete fn_calls;
+ }
+
diff -up Python-3.3.0rc2/Lib/test/test_systemtap.py.systemtap Python-3.3.0rc2/Lib/test/test_systemtap.py
--- Python-3.3.0rc2/Lib/test/test_systemtap.py.systemtap 2012-09-10 09:17:21.117511779 -0400
+++ Python-3.3.0rc2/Lib/test/test_systemtap.py 2012-09-10 09:17:21.117511779 -0400
@@ -0,0 +1,234 @@
+# Verify that systemtap static probes work
+#
+import subprocess
+import sys
+import sysconfig
+import os
+import unittest
+
+from test.support import run_unittest, TESTFN, unlink
+
+if '--with-systemtap' not in sysconfig.get_config_var('CONFIG_ARGS'):
+ raise unittest.SkipTest("Python was not configured --with-systemtap")
+
+try:
+ _, stap_version = subprocess.Popen(["stap", "-V"],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ ).communicate()
+except OSError:
+ # This is what "no stap" looks like. There may, however, be other
+ # errors that manifest this way too.
+ raise unittest.SkipTest("Couldn't find stap on the path")
+
+def invoke_systemtap_script(script, cmd):
+ # Start a child process, probing with the given systemtap script
+ # (passed as stdin to the "stap" tool)
+ # The script should be a bytes instance
+ # Return (stdout, stderr) pair
+
+ p = subprocess.Popen(["stap", "-", '-vv', '-c', cmd],
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ out, err = p.communicate(input=script)
+ return out, err
+
+# Verify that stap can run a simple "hello world"-style script
+# This can fail for various reasons:
+# - missing kernel headers
+# - permissions (a non-root user needs to be in the "stapdev" group)
+TRIVIAL_STAP_SCRIPT = b'probe begin { println("hello world") exit () }'
+
+out, err = invoke_systemtap_script(TRIVIAL_STAP_SCRIPT, 'true')
+if out != b'hello world\n':
+ raise unittest.SkipTest("Test systemtap script did not run; stderr was: %s" % err)
+
+# We don't expect stderr to be empty, since we're invoking stap with "-vv": stap
+# will (we hope) generate debugging output on stderr.
+
+def invoke_python_under_systemtap(script, pythoncode=None, pythonfile=None):
+ # Start a child python process, probing with the given systemtap script
+ # (passed as stdin to the "stap" tool)
+ # The script should be a bytes instance
+ # Return (stdout, stderr) pair
+
+ if pythonfile:
+ pythoncmd = '%s %s' % (sys.executable, pythonfile)
+ else:
+ pythoncmd = '%s -c %r' % (sys.executable, pythoncode)
+
+ # The process tree of a stap invocation of a command goes through
+ # something like this:
+ # stap ->fork/exec(staprun; exec stapio ->f/e(-c cmd); exec staprun -r)
+ # and this trip through setuid leads to LD_LIBRARY_PATH being dropped,
+ # which would lead to an --enable-shared build of python failing to
+ # find its libpython, with an error like:
+ # error while loading shared libraries: libpython3.3dm.so.1.0: cannot
+ # open shared object file: No such file or directory
+ # Hence we need to jump through some hoops to expose LD_LIBRARY_PATH to
+ # the invoked python process:
+ LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '')
+ if LD_LIBRARY_PATH:
+ pythoncmd = 'env LD_LIBRARY_PATH=%s ' % LD_LIBRARY_PATH + pythoncmd
+
+ return invoke_systemtap_script(script, pythoncmd)
+
+# When using the static markers, we need to supply the prefix of a systemtap
+# dotted probe point that contains the marker.
+# See http://sourceware.org/systemtap/langref/Probe_points.html
+#
+# We need to determine if this is a shared-library build
+#
+# Note that sysconfig can get this wrong; see:
+# http://bugs.python.org/issue14774
+#
+if '--enable-shared' in sysconfig.get_config_var('CONFIG_ARGS'):
+ # For a shared-library build, the markers are in library(INSTSONAME):
+ INSTSONAME = sysconfig.get_config_var('INSTSONAME')
+ probe_prefix = 'process("%s").library("%s")' % (sys.executable, INSTSONAME)
+else:
+ # For a non-shared-library build, we can simply use sys.executable:
+ probe_prefix = 'process("%s")' % sys.executable
+
+# The following script ought to generate lots of lines showing recursive
+# function entry and return, of the form:
+# 11408 python(8274): => __contains__ in Lib/_abcoll.py:362
+# 11414 python(8274): => __getitem__ in Lib/os.py:425
+# 11418 python(8274): => encode in Lib/os.py:490
+# 11424 python(8274): <= encode in Lib/os.py:493
+# 11428 python(8274): <= __getitem__ in Lib/os.py:426
+# 11433 python(8274): <= __contains__ in Lib/_abcoll.py:366
+# where the columns are:
+# - time in microseconds since start of script
+# - name of executable
+# - PID of process
+# and the remainder indicates the call/return hierarchy
+
+hierarchy_script = ('''
+probe %s.mark("function__entry") {
+ filename = user_string($arg1);
+ funcname = user_string($arg2);
+ lineno = $arg3;
+
+ printf("%%s => %%s in %%s:%%d\\n", thread_indent(1), funcname, filename, lineno);
+}
+
+probe %s.mark("function__return") {
+ filename = user_string($arg1);
+ funcname = user_string($arg2);
+ lineno = $arg3;
+
+ printf("%%s <= %%s in %%s:%%d\\n", thread_indent(-1), funcname, filename, lineno);
+}
+''' % (probe_prefix, probe_prefix)).encode('utf-8')
+
+
+class ErrorDumper:
+ # A context manager that dumps extra information if an exception is raised,
+ # to help track down why the problem occurred
+ def __init__(self, out, err):
+ self.out = out
+ self.err = err
+
+ def __enter__(self):
+ pass
+
+ def __exit__(self, type_, value, traceback):
+ if type_:
+ # an exception is being raised:
+ print('stdout: %s' % out.decode())
+ print('stderr: %s' % err.decode())
+
+class SystemtapTests(unittest.TestCase):
+
+ def test_invoking_python(self):
+ # Ensure that we can invoke python under stap, with a trivial stap
+ # script:
+ out, err = invoke_python_under_systemtap(
+ b'probe begin { println("hello from stap") exit () }',
+ pythoncode="print('hello from python')")
+ with ErrorDumper(out, err):
+ self.assertIn(b'hello from stap', out)
+ self.assertIn(b'hello from python', out)
+
+ def test_function_entry(self):
+ # Ensure that the function_entry static marker works
+ out, err = invoke_python_under_systemtap(hierarchy_script)
+ # stdout ought to contain various lines showing recursive function
+ # entry and return (see above)
+
+ # Uncomment this for debugging purposes:
+ # print(out.decode('utf-8'))
+
+ # Executing the cmdline-supplied "pass":
+ # 0 python(8274): => <module> in <string>:1
+ # 5 python(8274): <= <module> in <string>:1
+ with ErrorDumper(out, err):
+ self.assertIn(b'=> <module> in <string>:1', out,
+ msg="stdout: %s\nstderr: %s\n" % (out, err))
+
+ def test_function_encoding(self):
+ # Ensure that function names containing non-Latin 1 code
+ # points are handled:
+ pythonfile = TESTFN
+ try:
+ unlink(pythonfile)
+ f = open(pythonfile, "wb")
+ f.write("""
+# Sample script with non-ASCII filename, for use by test_systemtap.py
+# Implicitly UTF-8
+
+def 文字化け():
+ '''Function with non-ASCII identifier; I believe this reads "mojibake"'''
+ print("hello world!")
+
+文字化け()
+""".encode('utf-8'))
+ f.close()
+
+ out, err = invoke_python_under_systemtap(hierarchy_script,
+ pythonfile=pythonfile)
+ out_utf8 = out.decode('utf-8')
+ with ErrorDumper(out, err):
+ self.assertIn('=> <module> in %s:5' % pythonfile, out_utf8)
+ self.assertIn(' => 文字化け in %s:5' % pythonfile, out_utf8)
+ self.assertIn(' <= 文字化け in %s:7' % pythonfile, out_utf8)
+ self.assertIn('<= <module> in %s:9' % pythonfile, out_utf8)
+ finally:
+ unlink(pythonfile)
+
+ @unittest.skipIf(sys.getfilesystemencoding() == 'ascii',
+ 'the test filename is not encodable with ASCII')
+ def test_filename_encoding(self):
+ # Ensure that scripts names containing non-Latin 1 code
+ # points are handled:
+ pythonfile = TESTFN + '_☠.py'
+ try:
+ unlink(pythonfile)
+ f = open(pythonfile, "wb")
+ f.write("""
+def foo():
+ '''Function with non-ASCII identifier; I believe this reads "mojibake"'''
+ print("hello world!")
+
+foo()
+""".encode('utf-8'))
+ f.close()
+
+ out, err = invoke_python_under_systemtap(hierarchy_script,
+ pythonfile=pythonfile)
+ out_utf8 = out.decode('utf-8')
+ with ErrorDumper(out, err):
+ self.assertIn('=> <module> in %s:2' % pythonfile, out_utf8)
+ self.assertIn(' => foo in %s:2' % pythonfile, out_utf8)
+ self.assertIn(' <= foo in %s:4' % pythonfile, out_utf8)
+ self.assertIn('<= <module> in %s:6' % pythonfile, out_utf8)
+ finally:
+ unlink(pythonfile)
+
+def test_main():
+ run_unittest(SystemtapTests)
+
+if __name__ == "__main__":
+ test_main()
diff -up Python-3.3.0rc2/Makefile.pre.in.systemtap Python-3.3.0rc2/Makefile.pre.in
--- Python-3.3.0rc2/Makefile.pre.in.systemtap 2012-09-09 05:11:05.000000000 -0400
+++ Python-3.3.0rc2/Makefile.pre.in 2012-09-10 09:19:51.195501518 -0400
@@ -363,6 +363,7 @@ PYTHON_OBJS= \
+#pragma D attributes Evolving/Evolving/Common provider python provider
+#pragma D attributes Private/Private/Common provider python module
+#pragma D attributes Private/Private/Common provider python function
+#pragma D attributes Evolving/Evolving/Common provider python name
+#pragma D attributes Evolving/Evolving/Common provider python args
diff -up Python-2.7rc1/Makefile.pre.in.systemtap Python-2.7rc1/Makefile.pre.in
--- Python-2.7rc1/Makefile.pre.in.systemtap 2010-06-06 10:53:15.488978775 -0400
+++ Python-2.7rc1/Makefile.pre.in 2010-06-06 11:05:30.411100568 -0400
@@ -298,6 +298,7 @@ PYTHON_OBJS= \
Python/formatter_unicode.o \
Python/fileutils.o \
Python/formatter_string.o \
Python/$(DYNLOADFILE) \
+ @SYSTEMTAPOBJS@ \
+ @DTRACEOBJS@ \
$(LIBOBJS) \
$(MACHDEP_OBJS) \
$(THREADOBJ)
@@ -713,7 +714,8 @@ Objects/setobject.o: $(srcdir)/Objects/s
$(OPCODETARGETS_H): $(OPCODETARGETGEN_FILES)
$(OPCODETARGETGEN) $(OPCODETARGETS_H)

-Python/ceval.o: $(OPCODETARGETS_H) $(srcdir)/Python/ceval_gil.h
+Python/ceval.o: $(OPCODETARGETS_H) $(srcdir)/Python/ceval_gil.h \
+ $(srcdir)/Python/ceval_systemtap.h @SYSTEMTAPDEPS@

Python/frozen.o: Python/importlib.h Python/importlib_external.h
@@ -599,6 +600,18 @@ Python/formatter_unicode.o: $(srcdir)/Py
Python/formatter_string.o: $(srcdir)/Python/formatter_string.c \
$(STRINGLIB_HEADERS)

@@ -724,6 +726,13 @@ Objects/typeobject.o: $(srcdir)/Objects/
Objects/typeslots.inc: $(srcdir)/Include/typeslots.h $(srcdir)/Objects/typeslots.py
$(PYTHON) $(srcdir)/Objects/typeslots.py < $(srcdir)/Include/typeslots.h > Objects/typeslots.inc

+# Only needed with --with-systemtap; not a public header:
+$(srcdir)/Python/pysystemtap.h: $(srcdir)/Python/pysystemtap.d
+ dtrace -o $@ $(DFLAGS) -C -h -s $(srcdir)/Python/pysystemtap.d
+# Only needed with --with-dtrace
+buildinclude:
+ mkdir -p Include
+
+Include/pydtrace.h: buildinclude $(srcdir)/Include/pydtrace.d
+ dtrace -o $@ $(DFLAGS) -C -h -s $(srcdir)/Include/pydtrace.d
+
+Python/pysystemtap.o: $(srcdir)/Python/pysystemtap.d Python/ceval.o
+ dtrace -o $@ $(DFLAGS) -C -G -s $(srcdir)/Python/pysystemtap.d Python/ceval.o
+Python/ceval.o: Include/pydtrace.h
+
+Python/dtrace.o: buildinclude $(srcdir)/Include/pydtrace.d Python/ceval.o
+ dtrace -o $@ $(DFLAGS) -C -G -s $(srcdir)/Include/pydtrace.d Python/ceval.o
+
############################################################################
# Header files

@@ -1345,6 +1354,7 @@ clean: pycremoval
-rm -f Lib/lib2to3/*Grammar*.pickle
-rm -f Programs/_testembed Programs/_freeze_importlib
-rm -rf build
+ -rm -f $(srcdir)/Python/pysystemtap.h

profile-removal:
find . -name '*.gc??' -exec rm -f {} ';'
diff -up Python-3.3.0rc2/pyconfig.h.in.systemtap Python-3.3.0rc2/pyconfig.h.in
--- Python-3.3.0rc2/pyconfig.h.in.systemtap 2012-09-09 05:11:14.000000000 -0400
+++ Python-3.3.0rc2/pyconfig.h.in 2012-09-10 09:17:21.120511781 -0400
@@ -1306,6 +1306,9 @@
/* Define if you want to compile in Python-specific mallocs */
#undef WITH_PYMALLOC

+/* Define if you want to compile in SystemTap support */
+#undef WITH_SYSTEMTAP
+
/* Define if you want to compile in rudimentary thread support */
#undef WITH_THREAD

diff -up Python-3.3.0rc2/Python/ceval.c.systemtap Python-3.3.0rc2/Python/ceval.c
--- Python-3.3.0rc2/Python/ceval.c.systemtap 2012-09-09 05:11:12.000000000 -0400
+++ Python-3.3.0rc2/Python/ceval.c 2012-09-10 09:17:21.122511781 -0400
@@ -18,6 +18,8 @@
@@ -1251,7 +1264,7 @@ Python/thread.o: @THREADHEADERS@
.PHONY: frameworkinstall frameworkinstallframework frameworkinstallstructure
.PHONY: frameworkinstallmaclib frameworkinstallapps frameworkinstallunixtools
.PHONY: frameworkaltinstallunixtools recheck autoconf clean clobber distclean
-.PHONY: smelly funny patchcheck touch altmaninstall
+.PHONY: smelly funny patchcheck touch altmaninstall buildinclude
.PHONY: gdbhooks

# IF YOU PUT ANYTHING HERE IT WILL GO AWAY
diff -up Python-2.7rc1/pyconfig.h.in.systemtap Python-2.7rc1/pyconfig.h.in
--- Python-2.7rc1/pyconfig.h.in.systemtap 2010-05-08 07:04:18.000000000 -0400
+++ Python-2.7rc1/pyconfig.h.in 2010-06-06 10:53:15.521974070 -0400
@@ -1074,6 +1074,9 @@
/* Define if you want documentation strings in extension modules */
#undef WITH_DOC_STRINGS

+/* Define if you want to compile in Dtrace support */
+#undef WITH_DTRACE
+
/* Define if you want to use the new-style (Openstep, Rhapsody, MacOS) dynamic
linker (dyld) instead of the old-style (NextStep) dynamic linker (rld).
Dyld is necessary to support frameworks. */
diff -up Python-2.7rc1/Python/ceval.c.systemtap Python-2.7rc1/Python/ceval.c
--- Python-2.7rc1/Python/ceval.c.systemtap 2010-05-09 10:46:46.000000000 -0400
+++ Python-2.7rc1/Python/ceval.c 2010-06-06 11:08:40.683100500 -0400
@@ -19,6 +19,10 @@

#include <ctype.h>

+#include "ceval_systemtap.h"
+#ifdef WITH_DTRACE
+#include "pydtrace.h"
+#endif
+
#ifndef WITH_TSC

#define READ_TIMESTAMP(var)
@@ -1160,6 +1162,10 @@ PyEval_EvalFrameEx(PyFrameObject *f, int
}
@@ -671,6 +675,55 @@ PyEval_EvalCode(PyCodeObject *co, PyObje
NULL);
}

+ if (PYTHON_FUNCTION_ENTRY_ENABLED()) {
+ systemtap_function_entry(f);
+ }
+
co = f->f_code;
names = co->co_names;
consts = co->co_consts;
@@ -3077,6 +3083,11 @@ fast_yield:

/* pop frame */
exit_eval_frame:
+
+ if (PYTHON_FUNCTION_RETURN_ENABLED()) {
+ systemtap_function_return(f);
+ }
+
Py_LeaveRecursiveCall();
f->f_executing = 0;
tstate->frame = f->f_back;
diff -up Python-3.3.0rc2/Python/ceval_systemtap.h.systemtap Python-3.3.0rc2/Python/ceval_systemtap.h
--- Python-3.3.0rc2/Python/ceval_systemtap.h.systemtap 2012-09-10 09:17:21.122511781 -0400
+++ Python-3.3.0rc2/Python/ceval_systemtap.h 2012-09-10 09:17:21.122511781 -0400
@@ -0,0 +1,86 @@
+/*
+ Support for SystemTap static markers
+*/
+
+#ifdef WITH_SYSTEMTAP
+
+#include "pysystemtap.h"
+
+/*
+ A struct to hold all of the information gathered when one of the traceable
+ markers is triggered
+*/
+struct frame_marker_info
+#ifdef WITH_DTRACE
+static void
+dtrace_entry(PyFrameObject *f)
+{
+ PyObject *filename_obj;
+ PyObject *funcname_obj;
+ const char *filename;
+ const char *funcname;
+ const char *fname;
+ int lineno;
+};
+
+static void
+get_frame_marker_info(PyFrameObject *f, struct frame_marker_info *fmi)
+{
+ PyObject *ptype;
+ PyObject *pvalue;
+ PyObject *ptraceback;
+ filename = PyString_AsString(f->f_code->co_filename);
+ fname = PyString_AsString(f->f_code->co_name);
+ lineno = PyCode_Addr2Line(f->f_code, f->f_lasti);
+
+ PyErr_Fetch(&ptype, &pvalue, &ptraceback);
+
+ fmi->filename_obj = PyUnicode_EncodeFSDefault(f->f_code->co_filename);
+ if (fmi->filename_obj) {
+ fmi->filename = PyBytes_AsString(fmi->filename_obj);
+ } else {
+ fmi->filename = NULL;
+ }
+
+ fmi->funcname_obj = PyUnicode_AsUTF8String(f->f_code->co_name);
+ if (fmi->funcname_obj) {
+ fmi->funcname = PyBytes_AsString(fmi->funcname_obj);
+ } else {
+ fmi->funcname = NULL;
+ }
+
+ fmi->lineno = PyCode_Addr2Line(f->f_code, f->f_lasti);
+
+ PyErr_Restore(ptype, pvalue, ptraceback);
+ PYTHON_FUNCTION_ENTRY((char *)filename, (char *)fname, lineno);
+
+ /*
+ * Currently a USDT tail-call will not receive the correct arguments.
+ * Disable the tail call here.
+ */
+#if defined(__sparc)
+ asm("nop");
+#endif
+}
+
+static void
+release_frame_marker_info(struct frame_marker_info *fmi)
+dtrace_return(PyFrameObject *f)
+{
+ Py_XDECREF(fmi->filename_obj);
+ Py_XDECREF(fmi->funcname_obj);
+}
+ const char *filename;
+ const char *fname;
+ int lineno;
+
+static void
+systemtap_function_entry(PyFrameObject *f)
+{
+ struct frame_marker_info fmi;
+ get_frame_marker_info(f, &fmi);
+ PYTHON_FUNCTION_ENTRY(fmi.filename, fmi.funcname, fmi.lineno, f);
+ release_frame_marker_info(&fmi);
+}
+ filename = PyString_AsString(f->f_code->co_filename);
+ fname = PyString_AsString(f->f_code->co_name);
+ lineno = PyCode_Addr2Line(f->f_code, f->f_lasti);
+ PYTHON_FUNCTION_RETURN((char *)filename, (char *)fname, lineno);
+
+static void
+systemtap_function_return(PyFrameObject *f)
+{
+ struct frame_marker_info fmi;
+ get_frame_marker_info(f, &fmi);
+ PYTHON_FUNCTION_RETURN(fmi.filename, fmi.funcname, fmi.lineno, f);
+ release_frame_marker_info(&fmi);
+ /*
+ * Currently a USDT tail-call will not receive the correct arguments.
+ * Disable the tail call here.
+ */
+#if defined(__sparc)
+ asm("nop");
+#endif
+}
+
+#else /* #ifdef WITH_SYSTEMTAP */
+
+/*
+ When configured --without-systemtap, everything compiles away to nothing:
+*/
+#else
+#define PYTHON_FUNCTION_ENTRY_ENABLED() 0
+#define PYTHON_FUNCTION_RETURN_ENABLED() 0
+#define systemtap_function_entry(f)
+#define systemtap_function_return(f)
+
+#define dtrace_entry(f)
+#define dtrace_return(f)
+#endif
diff -up Python-3.3.0rc2/Python/pysystemtap.d.systemtap Python-3.3.0rc2/Python/pysystemtap.d
--- Python-3.3.0rc2/Python/pysystemtap.d.systemtap 2012-09-10 09:17:21.122511781 -0400
+++ Python-3.3.0rc2/Python/pysystemtap.d 2012-09-10 09:17:21.122511781 -0400
@@ -0,0 +1,4 @@
+provider python {
+ probe function__entry(const char *, const char *, int, PyFrameObject *);
+ probe function__return(const char *, const char *, int, PyFrameObject *);
+};

/* Interpreter main loop */

@@ -909,6 +962,9 @@ PyEval_EvalFrameEx(PyFrameObject *f, int
}
}

+ if (PYTHON_FUNCTION_ENTRY_ENABLED())
+ dtrace_entry(f);
+
co = f->f_code;
names = co->co_names;
consts = co->co_consts;
@@ -3000,6 +3056,9 @@ fast_yield:

/* pop frame */
exit_eval_frame:
+ if (PYTHON_FUNCTION_RETURN_ENABLED())
+ dtrace_return(f);
+
Py_LeaveRecursiveCall();
tstate->frame = f->f_back;

@@ -0,0 +1,50 @@

diff -up Python-2.6.5/configure.ac.more-configuration-flags Python-2.6.5/configure.ac
--- Python-2.6.5/configure.ac.more-configuration-flags 2010-05-24 18:51:25.410111792 -0400
+++ Python-2.6.5/configure.ac 2010-05-24 18:59:23.954986388 -0400
@@ -2515,6 +2515,30 @@ else AC_MSG_RESULT(no)
fi],
[AC_MSG_RESULT(no)])

+AC_MSG_CHECKING(for --with-count-allocs)
+AC_ARG_WITH(count-allocs,
+[ --with(out)count-allocs enable/disable per-type instance accounting], [
+if test "$withval" != no
+then
+ AC_DEFINE(COUNT_ALLOCS, 1,
+ [Define to keep records of the number of instances of each type])
+ AC_MSG_RESULT(yes)
+else AC_MSG_RESULT(no)
+fi],
+[AC_MSG_RESULT(no)])
+
+AC_MSG_CHECKING(for --with-call-profile)
+AC_ARG_WITH(call-profile,
+[ --with(out)-call-profile enable/disable statistics on function call invocation], [
+if test "$withval" != no
+then
+ AC_DEFINE(CALL_PROFILE, 1,
+ [Define to keep records on function call invocation])
+ AC_MSG_RESULT(yes)
+else AC_MSG_RESULT(no)
+fi],
+[AC_MSG_RESULT(no)])
+
# Check for Python-specific malloc support
AC_MSG_CHECKING(for --with-pymalloc)
AC_ARG_WITH(pymalloc,
diff -up Python-2.6.5/pyconfig.h.in.more-configuration-flags Python-2.6.5/pyconfig.h.in
--- Python-2.6.5/pyconfig.h.in.more-configuration-flags 2010-05-24 18:51:45.677988086 -0400
+++ Python-2.6.5/pyconfig.h.in 2010-05-24 19:00:44.163987730 -0400
@@ -1019,6 +1019,12 @@
/* Define to profile with the Pentium timestamp counter */
#undef WITH_TSC

+/* Define to keep records of the number of instances of each type */
+#undef COUNT_ALLOCS
+
+/* Define to keep records on function call invocation */
+#undef CALL_PROFILE
+
/* Define if you want pymalloc to be disabled when running under valgrind */
#undef WITH_VALGRIND

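For illustration, in an interpreter configured --with-count-allocs (so that COUNT_ALLOCS is defined), the per-type counters become readable from Python through sys.getcounts(), which does not exist in normal builds; a minimal sketch, not part of the patch:

    # Sketch: inspect COUNT_ALLOCS data (only available in --with-count-allocs builds)
    import sys

    if hasattr(sys, 'getcounts'):
        # each entry is (type_name, allocated, freed, max_simultaneously_alive)
        for name, allocated, freed, max_alive in sys.getcounts()[:10]:
            print('%s: allocated=%d freed=%d max=%d'
                  % (name, allocated, freed, max_alive))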
@@ -0,0 +1,47 @@

diff -up Python-2.7rc1/Modules/posixmodule.c.statvfs-f-flag-constants Python-2.7rc1/Modules/posixmodule.c
--- Python-2.7rc1/Modules/posixmodule.c.statvfs-f-flag-constants 2010-05-15 17:45:30.000000000 -0400
+++ Python-2.7rc1/Modules/posixmodule.c 2010-06-07 22:54:16.162068624 -0400
@@ -9174,6 +9174,43 @@ all_ins(PyObject *d)
#endif
#endif

+ /* These came from statvfs.h */
+#ifdef ST_RDONLY
+ if (ins(d, "ST_RDONLY", (long)ST_RDONLY)) return -1;
+#endif /* ST_RDONLY */
+#ifdef ST_NOSUID
+ if (ins(d, "ST_NOSUID", (long)ST_NOSUID)) return -1;
+#endif /* ST_NOSUID */
+
+ /* GNU extensions */
+#ifdef ST_NODEV
+ if (ins(d, "ST_NODEV", (long)ST_NODEV)) return -1;
+#endif /* ST_NODEV */
+#ifdef ST_NOEXEC
+ if (ins(d, "ST_NOEXEC", (long)ST_NOEXEC)) return -1;
+#endif /* ST_NOEXEC */
+#ifdef ST_SYNCHRONOUS
+ if (ins(d, "ST_SYNCHRONOUS", (long)ST_SYNCHRONOUS)) return -1;
+#endif /* ST_SYNCHRONOUS */
+#ifdef ST_MANDLOCK
+ if (ins(d, "ST_MANDLOCK", (long)ST_MANDLOCK)) return -1;
+#endif /* ST_MANDLOCK */
+#ifdef ST_WRITE
+ if (ins(d, "ST_WRITE", (long)ST_WRITE)) return -1;
+#endif /* ST_WRITE */
+#ifdef ST_APPEND
+ if (ins(d, "ST_APPEND", (long)ST_APPEND)) return -1;
+#endif /* ST_APPEND */
+#ifdef ST_NOATIME
+ if (ins(d, "ST_NOATIME", (long)ST_NOATIME)) return -1;
+#endif /* ST_NOATIME */
+#ifdef ST_NODIRATIME
+ if (ins(d, "ST_NODIRATIME", (long)ST_NODIRATIME)) return -1;
+#endif /* ST_NODIRATIME */
+#ifdef ST_RELATIME
+ if (ins(d, "ST_RELATIME", (long)ST_RELATIME)) return -1;
+#endif /* ST_RELATIME */
+
#if defined(PYOS_OS2)
if (insertvalues(d)) return -1;
#endif
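For illustration, with this patch applied the new constants can be combined with os.statvfs() to inspect mount flags; a minimal sketch assuming a build carrying the patch (the path '/' is an arbitrary choice):

    # Sketch: check mount flags via the ST_* constants this patch adds to os
    import os

    st = os.statvfs('/')
    print('read-only: %s' % bool(st.f_flag & os.ST_RDONLY))
    print('nosuid: %s' % bool(st.f_flag & os.ST_NOSUID))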
@@ -0,0 +1,13 @@

--- Python-2.7.5/Lib/site.py.orig 2013-05-16 12:47:55.000000000 +0200
+++ Python-2.7.5/Lib/site.py 2013-05-16 12:56:20.089058109 +0200
@@ -529,6 +529,10 @@ def main():

abs__file__()
known_paths = removeduppaths()
+ from sysconfig import is_python_build
+ if is_python_build():
+ from _sysconfigdata import build_time_vars
+ sys.path.append(os.path.join(build_time_vars['abs_builddir'], 'Modules'))
if ENABLE_USER_SITE is None:
ENABLE_USER_SITE = check_enableusersite()
known_paths = addusersitepackages(known_paths)
@@ -0,0 +1,20 @@

diff -up Python-2.7/Python/pythonrun.c.less-verbose-COUNT_ALLOCS Python-2.7/Python/pythonrun.c
--- Python-2.7/Python/pythonrun.c.less-verbose-COUNT_ALLOCS 2010-08-17 14:49:33.321913909 -0400
+++ Python-2.7/Python/pythonrun.c 2010-08-17 14:54:48.750910403 -0400
@@ -470,7 +470,15 @@ Py_Finalize(void)

/* Debugging stuff */
#ifdef COUNT_ALLOCS
- dump_counts(stdout);
+ /* This is a downstream Fedora modification.
+ The upstream default with COUNT_ALLOCS is to always dump the counts to
+ stdout on exit. For our debug builds it's useful to have the info from
+ COUNT_ALLOCS available, but the stdout info here gets in the way, so
+ we make it optional, wrapping it in an environment variable (modelled
+ on the other PYTHONDUMP* env variables):
+ */
+ if (Py_GETENV("PYTHONDUMPCOUNTS"))
+ dump_counts(stdout);
#endif

PRINT_TOTAL_REFS();
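For illustration, with this change a COUNT_ALLOCS debug build only dumps the per-type counts at exit when PYTHONDUMPCOUNTS is set; a minimal sketch of opting back in, assuming such a build:

    # Sketch: opt in to the per-type count dump at interpreter exit
    import os
    import subprocess
    import sys

    env = dict(os.environ, PYTHONDUMPCOUNTS='1')
    # the child dumps counts to stdout during Py_Finalize (COUNT_ALLOCS builds only)
    subprocess.call([sys.executable, '-c', 'pass'], env=env)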
@@ -0,0 +1,11 @@

diff -up Python-2.7.2/Lib/test/test_io.py.disable-tests-in-test_io Python-2.7.2/Lib/test/test_io.py
--- Python-2.7.2/Lib/test/test_io.py.disable-tests-in-test_io 2011-09-01 14:18:45.963304089 -0400
+++ Python-2.7.2/Lib/test/test_io.py 2011-09-01 15:08:53.796098413 -0400
@@ -2669,6 +2669,7 @@ class SignalsTest(unittest.TestCase):
self.check_interrupted_read_retry(lambda x: x,
mode="r")

+ @unittest.skip('rhbz#732998')
@unittest.skipUnless(threading, 'Threading required for this test.')
def check_interrupted_write_retry(self, item, **fdopen_kwargs):
"""Check that a buffered write, when it gets interrupted (either
@@ -0,0 +1,13 @@

diff -up Python-2.7.2/Lib/test/test_dl.py.skip-test_dl Python-2.7.2/Lib/test/test_dl.py
--- Python-2.7.2/Lib/test/test_dl.py.skip-test_dl 2011-09-08 15:18:40.529034289 -0400
+++ Python-2.7.2/Lib/test/test_dl.py 2011-09-08 16:29:45.184742670 -0400
@@ -13,6 +13,9 @@ sharedlibs = [
('/usr/lib/libc.dylib', 'getpid'),
]

+# (also, "dl" is deprecated in favor of ctypes)
+@unittest._skipInRpmBuild('fails on 64-bit builds: '
+ 'module dl requires sizeof(int) == sizeof(long) == sizeof(char*)')
def test_main():
for s, func in sharedlibs:
try:
@@ -0,0 +1,14 @@

--- Python-2.7.2/Lib/test/test_sys.py.mark-tests-that-fail-in-rpmbuild 2011-09-08 18:02:31.627362039 -0400
+++ Python-2.7.2/Lib/test/test_sys.py 2011-09-08 18:15:29.450308851 -0400
@@ -734,6 +734,11 @@ class SizeofTest(unittest.TestCase):
# (PyTypeObject + PyNumberMethods + PyMappingMethods +
# PySequenceMethods + PyBufferProcs)
s = vsize('P2P15Pl4PP9PP11PI') + struct.calcsize('41P 10P 3P 6P')
+
+ # COUNT_ALLOCS adds further fields to the end of a PyTypeObject:
+ if hasattr(sys, 'getcounts'):
+ s += size('P')
+
class newstyleclass(object):
pass
check(newstyleclass, s)
@@ -0,0 +1,18 @@

diff -up Python-2.7.2/Lib/test/test_weakref.py.skip-test-within-test_weakref-in-debug-build Python-2.7.2/Lib/test/test_weakref.py
--- Python-2.7.2/Lib/test/test_weakref.py.skip-test-within-test_weakref-in-debug-build 2011-09-08 17:55:09.675392260 -0400
+++ Python-2.7.2/Lib/test/test_weakref.py 2011-09-08 17:59:08.857375903 -0400
@@ -550,6 +550,14 @@ class ReferencesTestCase(TestBase):
del c1, c2, C, D
gc.collect()

+ # In a debug build, this fails with:
+ # AssertionError: Lists differ: [] != ['C went away']
+ # Second list contains 1 additional elements.
+ # First extra element 0:
+ # C went away
+ # - []
+ # + ['C went away']
+ @unittest.skipIf(hasattr(sys, 'getobjects'), 'debug build')
def test_callback_in_cycle_resurrection(self):
import gc

@@ -0,0 +1,22 @@

diff -up Python-2.7.2/Lib/test/test_file2k.py.skip-tests-of-seeking-stdin-in-rpmbuild Python-2.7.2/Lib/test/test_file2k.py
--- Python-2.7.2/Lib/test/test_file2k.py.skip-tests-of-seeking-stdin-in-rpmbuild 2011-09-08 17:23:50.922520729 -0400
+++ Python-2.7.2/Lib/test/test_file2k.py 2011-09-08 17:24:41.368517277 -0400
@@ -213,6 +213,7 @@ class OtherFileTests(unittest.TestCase):
else:
f.close()

+ @unittest._skipInRpmBuild('seems not to raise the exception when run in Koji')
def testStdin(self):
# This causes the interpreter to exit on OSF1 v5.1.
if sys.platform != 'osf1V5':
diff -up Python-2.7.2/Lib/test/test_file.py.skip-tests-of-seeking-stdin-in-rpmbuild Python-2.7.2/Lib/test/test_file.py
--- Python-2.7.2/Lib/test/test_file.py.skip-tests-of-seeking-stdin-in-rpmbuild 2011-09-08 17:20:31.146534389 -0400
+++ Python-2.7.2/Lib/test/test_file.py 2011-09-08 17:24:45.016517030 -0400
@@ -154,6 +154,7 @@ class OtherFileTests(unittest.TestCase):
f.close()
self.fail('%r is an invalid file mode' % mode)

+ @unittest._skipInRpmBuild('seems not to raise the exception when run in Koji')
def testStdin(self):
# This causes the interpreter to exit on OSF1 v5.1.
if sys.platform != 'osf1V5':
@@ -1,12 +1,12 @@

diff -up Python-3.2.2/Lib/distutils/tests/test_bdist_rpm.py.skip-distutils-tests-that-fail-in-rpmbuild Python-3.2.2/Lib/distutils/tests/test_bdist_rpm.py
--- Python-3.2.2/Lib/distutils/tests/test_bdist_rpm.py.skip-distutils-tests-that-fail-in-rpmbuild 2011-09-03 12:16:40.000000000 -0400
+++ Python-3.2.2/Lib/distutils/tests/test_bdist_rpm.py 2011-09-10 05:04:56.328852558 -0400
@@ -23,6 +23,7 @@ setup(name='foo', version='0.1', py_modu
diff -up Python-2.7.3/Lib/distutils/tests/test_bdist_rpm.py.mark-tests-that-fail-in-rpmbuild Python-2.7.3/Lib/distutils/tests/test_bdist_rpm.py
--- Python-2.7.3/Lib/distutils/tests/test_bdist_rpm.py.mark-tests-that-fail-in-rpmbuild 2012-04-09 19:07:29.000000000 -0400
+++ Python-2.7.3/Lib/distutils/tests/test_bdist_rpm.py 2012-04-13 00:20:08.223819263 -0400
@@ -24,6 +24,7 @@ setup(name='foo', version='0.1', py_modu

"""

+@unittest._skipInRpmBuild("don't try to nest one rpm build inside another rpm build")
class BuildRpmTestCase(support.TempdirManager,
support.EnvironGuard,
support.LoggingSilencer,
diff -up Python-3.2.2/Lib/distutils/tests/test_build_ext.py.skip-distutils-tests-that-fail-in-rpmbuild Python-3.2.2/Lib/distutils/tests/test_build_ext.py
unittest.TestCase):
diff -up Python-2.7.3/Lib/distutils/tests/test_build_ext.py.mark-tests-that-fail-in-rpmbuild Python-2.7.3/Lib/distutils/tests/test_build_ext.py

@@ -0,0 +1,68 @@

diff -up Python-2.7.2/Lib/distutils/tests/test_build_ext.py.mark-tests-that-fail-in-rpmbuild Python-2.7.2/Lib/distutils/tests/test_build_ext.py
--- Python-2.7.2/Lib/distutils/tests/test_build_ext.py.mark-tests-that-fail-in-rpmbuild 2011-09-08 16:07:25.033834312 -0400
+++ Python-2.7.2/Lib/distutils/tests/test_build_ext.py 2011-09-08 17:43:15.656441082 -0400
@@ -330,6 +332,7 @@ class BuildExtTestCase(support.TempdirMa
self.assertEqual(lastdir, 'bar')

def test_ext_fullpath(self):
+ debug_ext = sysconfig.get_config_var("DEBUG_EXT")
ext = sysconfig.get_config_vars()['SO']
dist = Distribution()
cmd = build_ext(dist)
@@ -337,14 +340,14 @@ class BuildExtTestCase(support.TempdirMa
cmd.distribution.package_dir = {'': 'src'}
cmd.distribution.packages = ['lxml', 'lxml.html']
curdir = os.getcwd()
- wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
+ wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + debug_ext + ext)
path = cmd.get_ext_fullpath('lxml.etree')
self.assertEqual(wanted, path)

# building lxml.etree not inplace
cmd.inplace = 0
cmd.build_lib = os.path.join(curdir, 'tmpdir')
- wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + ext)
+ wanted = os.path.join(curdir, 'tmpdir', 'lxml', 'etree' + debug_ext + ext)
path = cmd.get_ext_fullpath('lxml.etree')
self.assertEqual(wanted, path)

@@ -354,13 +357,13 @@ class BuildExtTestCase(support.TempdirMa
cmd.distribution.packages = ['twisted', 'twisted.runner.portmap']
path = cmd.get_ext_fullpath('twisted.runner.portmap')
wanted = os.path.join(curdir, 'tmpdir', 'twisted', 'runner',
- 'portmap' + ext)
+ 'portmap' + debug_ext + ext)
self.assertEqual(wanted, path)

# building twisted.runner.portmap inplace
cmd.inplace = 1
path = cmd.get_ext_fullpath('twisted.runner.portmap')
- wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + ext)
+ wanted = os.path.join(curdir, 'twisted', 'runner', 'portmap' + debug_ext + ext)
self.assertEqual(wanted, path)

def test_build_ext_inplace(self):
@@ -373,8 +376,9 @@ class BuildExtTestCase(support.TempdirMa
cmd.distribution.package_dir = {'': 'src'}
cmd.distribution.packages = ['lxml', 'lxml.html']
curdir = os.getcwd()
+ debug_ext = sysconfig.get_config_var("DEBUG_EXT")
ext = sysconfig.get_config_var("SO")
- wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + ext)
+ wanted = os.path.join(curdir, 'src', 'lxml', 'etree' + debug_ext + ext)
path = cmd.get_ext_fullpath('lxml.etree')
self.assertEqual(wanted, path)

@@ -412,10 +416,11 @@ class BuildExtTestCase(support.TempdirMa
dist = Distribution({'name': 'UpdateManager'})
cmd = build_ext(dist)
cmd.ensure_finalized()
+ debug_ext = sysconfig.get_config_var("DEBUG_EXT")
ext = sysconfig.get_config_var("SO")
ext_name = os.path.join('UpdateManager', 'fdsend')
ext_path = cmd.get_ext_fullpath(ext_name)
- wanted = os.path.join(cmd.build_lib, 'UpdateManager', 'fdsend' + ext)
+ wanted = os.path.join(cmd.build_lib, 'UpdateManager', 'fdsend' + debug_ext + ext)
self.assertEqual(ext_path, wanted)

def test_build_ext_path_cross_platform(self):
@@ -0,0 +1,11 @@

diff -up Python-2.7.2/Lib/test/test_float.py.skip-test_float-known-failure-on-arm Python-2.7.2/Lib/test/test_float.py
--- Python-2.7.2/Lib/test/test_float.py.skip-test_float-known-failure-on-arm 2011-09-08 19:34:09.000986128 -0400
+++ Python-2.7.2/Lib/test/test_float.py 2011-09-08 19:34:57.969982779 -0400
@@ -1072,6 +1072,7 @@ class HexFloatTestCase(unittest.TestCase
self.identical(got, expected)


+ @unittest.skip('Known failure on ARM: http://bugs.python.org/issue8265')
def test_from_hex(self):
MIN = self.MIN;
MAX = self.MAX;
@@ -0,0 +1,11 @@

diff -up Python-2.7.2/Lib/ctypes/test/test_callbacks.py.skip-test_ctypes-known-failure-on-sparc Python-2.7.2/Lib/ctypes/test/test_callbacks.py
--- Python-2.7.2/Lib/ctypes/test/test_callbacks.py.skip-test_ctypes-known-failure-on-sparc 2011-09-08 19:42:35.541951490 -0400
+++ Python-2.7.2/Lib/ctypes/test/test_callbacks.py 2011-09-08 19:43:40.676947036 -0400
@@ -67,6 +67,7 @@ class Callbacks(unittest.TestCase):
self.check_type(c_longlong, 42)
self.check_type(c_longlong, -42)

+ @unittest.skip('Known failure on Sparc: http://bugs.python.org/issue8314')
def test_ulonglong(self):
# test some 64-bit values, with and without msb set.
self.check_type(c_ulonglong, 10955412242170339782)
@ -0,0 +1,24 @@
@@ -0,0 +1,24 @@
|
||||
diff -up Python-2.7.2/Lib/test/test_gc.py.fix-test_gc_with_COUNT_ALLOCS Python-2.7.2/Lib/test/test_gc.py |
||||
--- Python-2.7.2/Lib/test/test_gc.py.fix-test_gc_with_COUNT_ALLOCS 2011-09-08 19:49:13.045924309 -0400 |
||||
+++ Python-2.7.2/Lib/test/test_gc.py 2011-09-08 19:50:07.035920617 -0400 |
||||
@@ -102,11 +102,17 @@ class GCTests(unittest.TestCase): |
||||
del a |
||||
self.assertNotEqual(gc.collect(), 0) |
||||
del B, C |
||||
- self.assertNotEqual(gc.collect(), 0) |
||||
+ if hasattr(sys, 'getcounts'): |
||||
+ self.assertEqual(gc.collect(), 0) |
||||
+ else: |
||||
+ self.assertNotEqual(gc.collect(), 0) |
||||
A.a = A() |
||||
del A |
||||
- self.assertNotEqual(gc.collect(), 0) |
||||
- self.assertEqual(gc.collect(), 0) |
||||
+ if hasattr(sys, 'getcounts'): |
||||
+ self.assertEqual(gc.collect(), 0) |
||||
+ else: |
||||
+ self.assertNotEqual(gc.collect(), 0) |
||||
+ self.assertEqual(gc.collect(), 0) |
||||
|
||||
def test_method(self): |
||||
# Tricky: self.__init__ is a bound method, it references the instance. |
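
The guard above keys off the presence of sys.getcounts(), which only exists in a COUNT_ALLOCS build; a small probe illustrating the distinction (the output formatting is illustrative):

    import sys

    if hasattr(sys, 'getcounts'):
        # COUNT_ALLOCS build: per-type allocation counters are available, and
        # they keep type objects alive, which changes what gc.collect() can
        # reclaim (hence the extra branches in the test above).
        for name, allocs, frees, maxalloc in sys.getcounts()[:5]:
            print '%-20s allocs=%d frees=%d max=%d' % (name, allocs, frees, maxalloc)
    else:
        print 'regular build: sys.getcounts() is not available'
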
@ -0,0 +1,22 @@
@@ -0,0 +1,22 @@
|
||||
diff -up Python-2.7.2/Lib/test/test_openpty.py.skip-failing-pty-tests-in-rpmbuild Python-2.7.2/Lib/test/test_openpty.py |
||||
--- Python-2.7.2/Lib/test/test_openpty.py.skip-failing-pty-tests-in-rpmbuild 2011-09-09 05:09:28.698920379 -0400 |
||||
+++ Python-2.7.2/Lib/test/test_openpty.py 2011-09-09 05:10:54.805914490 -0400 |
||||
@@ -8,6 +8,7 @@ if not hasattr(os, "openpty"): |
||||
|
||||
|
||||
class OpenptyTest(unittest.TestCase): |
||||
+ @unittest._skipInRpmBuild('sometimes fails in Koji, possibly due to a mock issue (rhbz#714627)') |
||||
def test(self): |
||||
master, slave = os.openpty() |
||||
if not os.isatty(slave): |
||||
diff -up Python-2.7.2/Lib/test/test_pty.py.skip-failing-pty-tests-in-rpmbuild Python-2.7.2/Lib/test/test_pty.py |
||||
--- Python-2.7.2/Lib/test/test_pty.py.skip-failing-pty-tests-in-rpmbuild 2011-09-09 05:09:36.781919825 -0400 |
||||
+++ Python-2.7.2/Lib/test/test_pty.py 2011-09-09 05:11:14.741913127 -0400 |
||||
@@ -109,6 +109,7 @@ class PtyTest(unittest.TestCase): |
||||
os.close(master_fd) |
||||
|
||||
|
||||
+ @unittest._skipInRpmBuild('sometimes fails in Koji, possibly due to a mock issue (rhbz#714627)') |
||||
def test_fork(self): |
||||
debug("calling pty.fork()") |
||||
pid, master_fd = pty.fork() |
@ -0,0 +1,58 @@
@@ -0,0 +1,58 @@
|
||||
diff -up Python-2.7.2/Python/ceval.c.tsc-on-ppc Python-2.7.2/Python/ceval.c |
||||
--- Python-2.7.2/Python/ceval.c.tsc-on-ppc 2011-08-23 14:59:48.051300849 -0400 |
||||
+++ Python-2.7.2/Python/ceval.c 2011-08-23 15:33:25.412162902 -0400 |
||||
@@ -37,24 +37,42 @@ typedef unsigned long long uint64; |
||||
*/ |
||||
#if defined(__ppc__) || defined (__powerpc__) |
||||
|
||||
-#define READ_TIMESTAMP(var) ppc_getcounter(&var) |
||||
+#if defined( __powerpc64__) || defined(__LP64__) |
||||
+/* 64-bit PowerPC */ |
||||
+#define READ_TIMESTAMP(var) ppc64_getcounter(&var) |
||||
+static void |
||||
+ppc64_getcounter(uint64 *v) |
||||
+{ |
||||
+ /* On 64-bit PowerPC we can read the 64-bit timebase directly into a |
||||
+ 64-bit register */ |
||||
+ uint64 timebase; |
||||
+#ifdef _ARCH_PWR4 |
||||
+ asm volatile ("mfspr %0,268" : "=r" (timebase)); |
||||
+#else |
||||
+ asm volatile ("mftb %0" : "=r" (timebase)); |
||||
+#endif |
||||
+ *v = timebase; |
||||
+} |
||||
+ |
||||
+#else |
||||
+/* 32-bit PowerPC */ |
||||
+#define READ_TIMESTAMP(var) ppc32_getcounter(&var) |
||||
|
||||
static void |
||||
-ppc_getcounter(uint64 *v) |
||||
+ppc32_getcounter(uint64 *v) |
||||
{ |
||||
- register unsigned long tbu, tb, tbu2; |
||||
+ union { long long ll; long ii[2]; } u; |
||||
+ long tmp; |
||||
|
||||
loop: |
||||
- asm volatile ("mftbu %0" : "=r" (tbu) ); |
||||
- asm volatile ("mftb %0" : "=r" (tb) ); |
||||
- asm volatile ("mftbu %0" : "=r" (tbu2)); |
||||
- if (__builtin_expect(tbu != tbu2, 0)) goto loop; |
||||
- |
||||
- /* The slightly peculiar way of writing the next lines is |
||||
- compiled better by GCC than any other way I tried. */ |
||||
- ((long*)(v))[0] = tbu; |
||||
- ((long*)(v))[1] = tb; |
||||
+ asm volatile ("mftbu %0" : "=r" (u.ii[0]) ); |
||||
+ asm volatile ("mftb %0" : "=r" (u.ii[1]) ); |
||||
+ asm volatile ("mftbu %0" : "=r" (tmp)); |
||||
+ if (__builtin_expect(u.ii[0] != tmp, 0)) goto loop; |
||||
+ |
||||
+ *v = u.ll; |
||||
} |
||||
+#endif /* powerpc 32/64 bit */ |
||||
|
||||
#elif defined(__i386__) |
||||
|
@ -0,0 +1,12 @@
@@ -0,0 +1,12 @@
|
||||
diff -up Python-2.7.2/Modules/Setup.dist.no-gdbm Python-2.7.2/Modules/Setup.dist |
||||
--- Python-2.7.2/Modules/Setup.dist.no-gdbm 2011-09-13 14:25:43.496095926 -0400 |
||||
+++ Python-2.7.2/Modules/Setup.dist 2011-09-13 14:25:46.491095724 -0400 |
||||
@@ -396,7 +396,7 @@ dl dlmodule.c |
||||
# |
||||
# First, look at Setup.config; configure may have set this for you. |
||||
|
||||
-gdbm gdbmmodule.c -lgdbm |
||||
+# gdbm gdbmmodule.c -lgdbm |
||||
|
||||
|
||||
# Sleepycat Berkeley DB interface. |
@ -0,0 +1,762 @@
@@ -0,0 +1,762 @@
|
||||
diff --git a/Include/dictobject.h b/Include/dictobject.h |
||||
index ece01c6..acc1df0 100644 |
||||
--- a/Include/dictobject.h |
||||
+++ b/Include/dictobject.h |
||||
@@ -150,6 +150,8 @@ PyAPI_FUNC(PyObject *) PyDict_GetItemString(PyObject *dp, const char *key); |
||||
PyAPI_FUNC(int) PyDict_SetItemString(PyObject *dp, const char *key, PyObject *item); |
||||
PyAPI_FUNC(int) PyDict_DelItemString(PyObject *dp, const char *key); |
||||
|
||||
+PyAPI_FUNC(void) _PyDict_DebugMallocStats(FILE *out); |
||||
+ |
||||
#ifdef __cplusplus |
||||
} |
||||
#endif |
||||
diff --git a/Include/floatobject.h b/Include/floatobject.h |
||||
index 54e8825..33c6ac0 100644 |
||||
--- a/Include/floatobject.h |
||||
+++ b/Include/floatobject.h |
||||
@@ -132,6 +132,7 @@ PyAPI_FUNC(PyObject *) _PyFloat_FormatAdvanced(PyObject *obj, |
||||
failure. Used in builtin_round in bltinmodule.c. */ |
||||
PyAPI_FUNC(PyObject *) _Py_double_round(double x, int ndigits); |
||||
|
||||
+PyAPI_FUNC(void) _PyFloat_DebugMallocStats(FILE* out); |
||||
|
||||
|
||||
#ifdef __cplusplus |
||||
diff --git a/Include/frameobject.h b/Include/frameobject.h |
||||
index 17e7679..66d9d8b 100644 |
||||
--- a/Include/frameobject.h |
||||
+++ b/Include/frameobject.h |
||||
@@ -80,6 +80,8 @@ PyAPI_FUNC(void) PyFrame_FastToLocals(PyFrameObject *); |
||||
|
||||
PyAPI_FUNC(int) PyFrame_ClearFreeList(void); |
||||
|
||||
+PyAPI_FUNC(void) _PyFrame_DebugMallocStats(FILE *out); |
||||
+ |
||||
/* Return the line of code the frame is currently executing. */ |
||||
PyAPI_FUNC(int) PyFrame_GetLineNumber(PyFrameObject *); |
||||
|
||||
diff --git a/Include/intobject.h b/Include/intobject.h |
||||
index 252eea9..4003736 100644 |
||||
--- a/Include/intobject.h |
||||
+++ b/Include/intobject.h |
||||
@@ -75,6 +75,8 @@ PyAPI_FUNC(PyObject *) _PyInt_FormatAdvanced(PyObject *obj, |
||||
char *format_spec, |
||||
Py_ssize_t format_spec_len); |
||||
|
||||
+PyAPI_FUNC(void) _PyInt_DebugMallocStats(FILE *out); |
||||
+ |
||||
#ifdef __cplusplus |
||||
} |
||||
#endif |
||||
diff --git a/Include/listobject.h b/Include/listobject.h |
||||
index c445873..04664d7 100644 |
||||
--- a/Include/listobject.h |
||||
+++ b/Include/listobject.h |
||||
@@ -62,6 +62,8 @@ PyAPI_FUNC(PyObject *) _PyList_Extend(PyListObject *, PyObject *); |
||||
#define PyList_SET_ITEM(op, i, v) (((PyListObject *)(op))->ob_item[i] = (v)) |
||||
#define PyList_GET_SIZE(op) Py_SIZE(op) |
||||
|
||||
+PyAPI_FUNC(void) _PyList_DebugMallocStats(FILE *out); |
||||
+ |
||||
#ifdef __cplusplus |
||||
} |
||||
#endif |
||||
diff --git a/Include/methodobject.h b/Include/methodobject.h |
||||
index 6e160b6..1944517 100644 |
||||
--- a/Include/methodobject.h |
||||
+++ b/Include/methodobject.h |
||||
@@ -87,6 +87,10 @@ typedef struct { |
||||
|
||||
PyAPI_FUNC(int) PyCFunction_ClearFreeList(void); |
||||
|
||||
+PyAPI_FUNC(void) _PyCFunction_DebugMallocStats(FILE *out); |
||||
+PyAPI_FUNC(void) _PyMethod_DebugMallocStats(FILE *out); |
||||
+ |
||||
+ |
||||
#ifdef __cplusplus |
||||
} |
||||
#endif |
||||
diff --git a/Include/object.h b/Include/object.h |
||||
index afbc68d..ce5febf 100644 |
||||
--- a/Include/object.h |
||||
+++ b/Include/object.h |
||||
@@ -1005,6 +1005,13 @@ PyAPI_FUNC(void) _PyTrash_thread_destroy_chain(void); |
||||
_PyTrash_thread_deposit_object((PyObject*)op); \ |
||||
} while (0); |
||||
|
||||
+PyAPI_FUNC(void) |
||||
+_PyDebugAllocatorStats(FILE *out, const char *block_name, int num_blocks, |
||||
+ size_t sizeof_block); |
||||
+ |
||||
+PyAPI_FUNC(void) |
||||
+_PyObject_DebugTypeStats(FILE *out); |
||||
+ |
||||
#ifdef __cplusplus |
||||
} |
||||
#endif |
||||
diff --git a/Include/objimpl.h b/Include/objimpl.h |
||||
index 55e83ec..331b456 100644 |
||||
--- a/Include/objimpl.h |
||||
+++ b/Include/objimpl.h |
||||
@@ -101,13 +101,13 @@ PyAPI_FUNC(void) PyObject_Free(void *); |
||||
|
||||
/* Macros */ |
||||
#ifdef WITH_PYMALLOC |
||||
+PyAPI_FUNC(void) _PyObject_DebugMallocStats(FILE *out); |
||||
#ifdef PYMALLOC_DEBUG /* WITH_PYMALLOC && PYMALLOC_DEBUG */ |
||||
PyAPI_FUNC(void *) _PyObject_DebugMalloc(size_t nbytes); |
||||
PyAPI_FUNC(void *) _PyObject_DebugRealloc(void *p, size_t nbytes); |
||||
PyAPI_FUNC(void) _PyObject_DebugFree(void *p); |
||||
PyAPI_FUNC(void) _PyObject_DebugDumpAddress(const void *p); |
||||
PyAPI_FUNC(void) _PyObject_DebugCheckAddress(const void *p); |
||||
-PyAPI_FUNC(void) _PyObject_DebugMallocStats(void); |
||||
PyAPI_FUNC(void *) _PyObject_DebugMallocApi(char api, size_t nbytes); |
||||
PyAPI_FUNC(void *) _PyObject_DebugReallocApi(char api, void *p, size_t nbytes); |
||||
PyAPI_FUNC(void) _PyObject_DebugFreeApi(char api, void *p); |
||||
diff --git a/Include/setobject.h b/Include/setobject.h |
||||
index 52b07d5..143b175 100644 |
||||
--- a/Include/setobject.h |
||||
+++ b/Include/setobject.h |
||||
@@ -93,6 +93,7 @@ PyAPI_FUNC(int) _PySet_NextEntry(PyObject *set, Py_ssize_t *pos, PyObject **key, |
||||
PyAPI_FUNC(PyObject *) PySet_Pop(PyObject *set); |
||||
PyAPI_FUNC(int) _PySet_Update(PyObject *set, PyObject *iterable); |
||||
|
||||
+PyAPI_FUNC(void) _PySet_DebugMallocStats(FILE *out); |
||||
#ifdef __cplusplus |
||||
} |
||||
#endif |
||||
diff --git a/Include/stringobject.h b/Include/stringobject.h |
||||
index 18b5b41..de78d76 100644 |
||||
--- a/Include/stringobject.h |
||||
+++ b/Include/stringobject.h |
||||
@@ -204,6 +204,8 @@ PyAPI_FUNC(PyObject *) _PyBytes_FormatAdvanced(PyObject *obj, |
||||
char *format_spec, |
||||
Py_ssize_t format_spec_len); |
||||
|
||||
+PyAPI_FUNC(void) _PyString_DebugMallocStats(FILE *out); |
||||
+ |
||||
#ifdef __cplusplus |
||||
} |
||||
#endif |
||||
diff --git a/Include/tupleobject.h b/Include/tupleobject.h |
||||
index a5ab733..e233f47 100644 |
||||
--- a/Include/tupleobject.h |
||||
+++ b/Include/tupleobject.h |
||||
@@ -54,7 +54,7 @@ PyAPI_FUNC(void) _PyTuple_MaybeUntrack(PyObject *); |
||||
#define PyTuple_SET_ITEM(op, i, v) (((PyTupleObject *)(op))->ob_item[i] = v) |
||||
|
||||
PyAPI_FUNC(int) PyTuple_ClearFreeList(void); |
||||
- |
||||
+PyAPI_FUNC(void) _PyTuple_DebugMallocStats(FILE *out); |
||||
#ifdef __cplusplus |
||||
} |
||||
#endif |
||||
diff --git a/Include/unicodeobject.h b/Include/unicodeobject.h |
||||
index 9ab724a..b91250a 100644 |
||||
--- a/Include/unicodeobject.h |
||||
+++ b/Include/unicodeobject.h |
||||
@@ -1406,6 +1406,8 @@ PyAPI_FUNC(int) _PyUnicode_IsAlpha( |
||||
Py_UNICODE ch /* Unicode character */ |
||||
); |
||||
|
||||
+PyAPI_FUNC(void) _PyUnicode_DebugMallocStats(FILE *out); |
||||
+ |
||||
#ifdef __cplusplus |
||||
} |
||||
#endif |
||||
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py |
||||
index 82243f3..8f1e1a0 100644 |
||||
--- a/Lib/test/test_sys.py |
||||
+++ b/Lib/test/test_sys.py |
||||
@@ -488,6 +488,32 @@ class SysModuleTest(unittest.TestCase): |
||||
p.wait() |
||||
self.assertIn(executable, ["''", repr(sys.executable)]) |
||||
|
||||
+ def test_debugmallocstats(self): |
||||
+ # Test sys._debugmallocstats() |
||||
+ |
||||
+ import subprocess |
||||
+ |
||||
+ # Verify the default of writing to stderr: |
||||
+ p = subprocess.Popen([sys.executable, |
||||
+ '-c', 'import sys; sys._debugmallocstats()'], |
||||
+ stderr=subprocess.PIPE) |
||||
+ out, err = p.communicate() |
||||
+ p.wait() |
||||
+ self.assertIn("arenas allocated current", err) |
||||
+ |
||||
+ # Verify that we can redirect the output to a file (not a file-like |
||||
+ # object, though): |
||||
+ with open('mallocstats.txt', 'w') as out: |
||||
+ sys._debugmallocstats(out) |
||||
+ result = open('mallocstats.txt').read() |
||||
+ self.assertIn("arenas allocated current", result) |
||||
+ os.unlink('mallocstats.txt') |
||||
+ |
||||
+ # Verify that the destination must be a file: |
||||
+ with self.assertRaises(TypeError): |
||||
+ sys._debugmallocstats(42) |
||||
+ |
||||
+ |
||||
class SizeofTest(unittest.TestCase): |
||||
|
||||
def setUp(self): |
||||
diff --git a/Objects/classobject.c b/Objects/classobject.c |
||||
index 2c9c216..2ba7077 100644 |
||||
--- a/Objects/classobject.c |
||||
+++ b/Objects/classobject.c |
||||
@@ -2694,3 +2694,12 @@ PyMethod_Fini(void) |
||||
{ |
||||
(void)PyMethod_ClearFreeList(); |
||||
} |
||||
+ |
||||
+/* Print summary info about the state of the optimized allocator */ |
||||
+void |
||||
+_PyMethod_DebugMallocStats(FILE *out) |
||||
+{ |
||||
+ _PyDebugAllocatorStats(out, |
||||
+ "free PyMethodObject", |
||||
+ numfree, sizeof(PyMethodObject)); |
||||
+} |
||||
diff --git a/Objects/dictobject.c b/Objects/dictobject.c |
||||
index ba36b18..b8a5c7f 100644 |
||||
--- a/Objects/dictobject.c |
||||
+++ b/Objects/dictobject.c |
||||
@@ -225,6 +225,15 @@ show_track(void) |
||||
static PyDictObject *free_list[PyDict_MAXFREELIST]; |
||||
static int numfree = 0; |
||||
|
||||
+/* Print summary info about the state of the optimized allocator */ |
||||
+void |
||||
+_PyDict_DebugMallocStats(FILE *out) |
||||
+{ |
||||
+ _PyDebugAllocatorStats(out, |
||||
+ "free PyDictObject", numfree, sizeof(PyDictObject)); |
||||
+} |
||||
+ |
||||
+ |
||||
void |
||||
PyDict_Fini(void) |
||||
{ |
||||
diff --git a/Objects/floatobject.c b/Objects/floatobject.c |
||||
index ba867ef..533511d 100644 |
||||
--- a/Objects/floatobject.c |
||||
+++ b/Objects/floatobject.c |
||||
@@ -35,6 +35,22 @@ typedef struct _floatblock PyFloatBlock; |
||||
static PyFloatBlock *block_list = NULL; |
||||
static PyFloatObject *free_list = NULL; |
||||
|
||||
+/* Print summary info about the state of the optimized allocator */ |
||||
+void |
||||
+_PyFloat_DebugMallocStats(FILE *out) |
||||
+{ |
||||
+ int num_blocks = 0; |
||||
+ PyFloatBlock *block; |
||||
+ |
||||
+ /* Walk the block list, counting */ |
||||
+ for (block = block_list; block ; block = block->next) { |
||||
+ num_blocks++; |
||||
+ } |
||||
+ |
||||
+ _PyDebugAllocatorStats(out, |
||||
+ "PyFloatBlock", num_blocks, sizeof(PyFloatBlock)); |
||||
+} |
||||
+ |
||||
static PyFloatObject * |
||||
fill_free_list(void) |
||||
{ |
||||
diff --git a/Objects/frameobject.c b/Objects/frameobject.c |
||||
index f9e4a0e..337fc58 100644 |
||||
--- a/Objects/frameobject.c |
||||
+++ b/Objects/frameobject.c |
||||
@@ -982,3 +982,13 @@ PyFrame_Fini(void) |
||||
Py_XDECREF(builtin_object); |
||||
builtin_object = NULL; |
||||
} |
||||
+ |
||||
+/* Print summary info about the state of the optimized allocator */ |
||||
+void |
||||
+_PyFrame_DebugMallocStats(FILE *out) |
||||
+{ |
||||
+ _PyDebugAllocatorStats(out, |
||||
+ "free PyFrameObject", |
||||
+ numfree, sizeof(PyFrameObject)); |
||||
+} |
||||
+ |
||||
diff --git a/Objects/intobject.c b/Objects/intobject.c |
||||
index 28182f9..f442ea0 100644 |
||||
--- a/Objects/intobject.c |
||||
+++ b/Objects/intobject.c |
||||
@@ -44,6 +44,23 @@ typedef struct _intblock PyIntBlock; |
||||
static PyIntBlock *block_list = NULL; |
||||
static PyIntObject *free_list = NULL; |
||||
|
||||
+ |
||||
+/* Print summary info about the state of the optimized allocator */ |
||||
+void |
||||
+_PyInt_DebugMallocStats(FILE *out) |
||||
+{ |
||||
+ int num_blocks = 0; |
||||
+ PyIntBlock *block; |
||||
+ |
||||
+ /* Walk the block list, counting */ |
||||
+ for (block = block_list; block ; block = block->next) { |
||||
+ num_blocks++; |
||||
+ } |
||||
+ |
||||
+ _PyDebugAllocatorStats(out, |
||||
+ "PyIntBlock", num_blocks, sizeof(PyIntBlock)); |
||||
+} |
||||
+ |
||||
static PyIntObject * |
||||
fill_free_list(void) |
||||
{ |
||||
diff --git a/Objects/listobject.c b/Objects/listobject.c |
||||
index f753643..e6fa17d 100644 |
||||
--- a/Objects/listobject.c |
||||
+++ b/Objects/listobject.c |
||||
@@ -109,6 +109,15 @@ PyList_Fini(void) |
||||
} |
||||
} |
||||
|
||||
+/* Print summary info about the state of the optimized allocator */ |
||||
+void |
||||
+_PyList_DebugMallocStats(FILE *out) |
||||
+{ |
||||
+ _PyDebugAllocatorStats(out, |
||||
+ "free PyListObject", |
||||
+ numfree, sizeof(PyListObject)); |
||||
+} |
||||
+ |
||||
PyObject * |
||||
PyList_New(Py_ssize_t size) |
||||
{ |
||||
diff --git a/Objects/methodobject.c b/Objects/methodobject.c |
||||
index 0b60ca3..3193135 100644 |
||||
--- a/Objects/methodobject.c |
||||
+++ b/Objects/methodobject.c |
||||
@@ -412,6 +412,15 @@ PyCFunction_Fini(void) |
||||
(void)PyCFunction_ClearFreeList(); |
||||
} |
||||
|
||||
+/* Print summary info about the state of the optimized allocator */ |
||||
+void |
||||
+_PyCFunction_DebugMallocStats(FILE *out) |
||||
+{ |
||||
+ _PyDebugAllocatorStats(out, |
||||
+ "free PyCFunction", |
||||
+ numfree, sizeof(PyCFunction)); |
||||
+} |
||||
+ |
||||
/* PyCFunction_New() is now just a macro that calls PyCFunction_NewEx(), |
||||
but it's part of the API so we need to keep a function around that |
||||
existing C extensions can call. |
||||
diff --git a/Objects/object.c b/Objects/object.c |
||||
index 14f4e9f..68aedcd 100644 |
||||
--- a/Objects/object.c |
||||
+++ b/Objects/object.c |
||||
@@ -2355,6 +2355,23 @@ PyMem_Free(void *p) |
||||
PyMem_FREE(p); |
||||
} |
||||
|
||||
+void |
||||
+_PyObject_DebugTypeStats(FILE *out) |
||||
+{ |
||||
+ _PyString_DebugMallocStats(out); |
||||
+ _PyCFunction_DebugMallocStats(out); |
||||
+ _PyDict_DebugMallocStats(out); |
||||
+ _PyFloat_DebugMallocStats(out); |
||||
+ _PyFrame_DebugMallocStats(out); |
||||
+ _PyInt_DebugMallocStats(out); |
||||
+ _PyList_DebugMallocStats(out); |
||||
+ _PyMethod_DebugMallocStats(out); |
||||
+ _PySet_DebugMallocStats(out); |
||||
+ _PyTuple_DebugMallocStats(out); |
||||
+#if Py_USING_UNICODE |
||||
+ _PyUnicode_DebugMallocStats(out); |
||||
+#endif |
||||
+} |
||||
|
||||
/* These methods are used to control infinite recursion in repr, str, print, |
||||
etc. Container objects that may recursively contain themselves, |
||||
diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c |
||||
index 38ebc37..2c05359 100644 |
||||
--- a/Objects/obmalloc.c |
||||
+++ b/Objects/obmalloc.c |
||||
@@ -508,12 +508,10 @@ static struct arena_object* usable_arenas = NULL; |
||||
/* Number of arenas allocated that haven't been free()'d. */ |
||||
static size_t narenas_currently_allocated = 0; |
||||
|
||||
-#ifdef PYMALLOC_DEBUG |
||||
/* Total number of times malloc() called to allocate an arena. */ |
||||
static size_t ntimes_arena_allocated = 0; |
||||
/* High water mark (max value ever seen) for narenas_currently_allocated. */ |
||||
static size_t narenas_highwater = 0; |
||||
-#endif |
||||
|
||||
/* Allocate a new arena. If we run out of memory, return NULL. Else |
||||
* allocate a new arena, and return the address of an arena_object |
||||
@@ -528,7 +526,7 @@ new_arena(void) |
||||
|
||||
#ifdef PYMALLOC_DEBUG |
||||
if (Py_GETENV("PYTHONMALLOCSTATS")) |
||||
- _PyObject_DebugMallocStats(); |
||||
+ _PyObject_DebugMallocStats(stderr); |
||||
#endif |
||||
if (unused_arena_objects == NULL) { |
||||
uint i; |
||||
@@ -588,11 +586,9 @@ new_arena(void) |
||||
} |
||||
|
||||
++narenas_currently_allocated; |
||||
-#ifdef PYMALLOC_DEBUG |
||||
++ntimes_arena_allocated; |
||||
if (narenas_currently_allocated > narenas_highwater) |
||||
narenas_highwater = narenas_currently_allocated; |
||||
-#endif |
||||
arenaobj->freepools = NULL; |
||||
/* pool_address <- first pool-aligned address in the arena |
||||
nfreepools <- number of whole pools that fit after alignment */ |
||||
@@ -1694,17 +1690,19 @@ _PyObject_DebugDumpAddress(const void *p) |
||||
} |
||||
} |
||||
|
||||
+#endif /* PYMALLOC_DEBUG */ |
||||
+ |
||||
static size_t |
||||
-printone(const char* msg, size_t value) |
||||
+printone(FILE *out, const char* msg, size_t value) |
||||
{ |
||||
int i, k; |
||||
char buf[100]; |
||||
size_t origvalue = value; |
||||
|
||||
- fputs(msg, stderr); |
||||
+ fputs(msg, out); |
||||
for (i = (int)strlen(msg); i < 35; ++i) |
||||
- fputc(' ', stderr); |
||||
- fputc('=', stderr); |
||||
+ fputc(' ', out); |
||||
+ fputc('=', out); |
||||
|
||||
/* Write the value with commas. */ |
||||
i = 22; |
||||
@@ -1725,17 +1723,32 @@ printone(const char* msg, size_t value) |
||||
|
||||
while (i >= 0) |
||||
buf[i--] = ' '; |
||||
- fputs(buf, stderr); |
||||
+ fputs(buf, out); |
||||
|
||||
return origvalue; |
||||
} |
||||
|
||||
-/* Print summary info to stderr about the state of pymalloc's structures. |
||||
+void |
||||
+_PyDebugAllocatorStats(FILE *out, |
||||
+ const char *block_name, int num_blocks, size_t sizeof_block) |
||||
+{ |
||||
+ char buf1[128]; |
||||
+ char buf2[128]; |
||||
+ PyOS_snprintf(buf1, sizeof(buf1), |
||||
+ "%d %ss * %zd bytes each", |
||||
+ num_blocks, block_name, sizeof_block); |
||||
+ PyOS_snprintf(buf2, sizeof(buf2), |
||||
+ "%48s ", buf1); |
||||
+ (void)printone(out, buf2, num_blocks * sizeof_block); |
||||
+} |
||||
+ |
||||
+ |
||||
+/* Print summary info to "out" about the state of pymalloc's structures. |
||||
* In Py_DEBUG mode, also perform some expensive internal consistency |
||||
* checks. |
||||
*/ |
||||
void |
||||
-_PyObject_DebugMallocStats(void) |
||||
+_PyObject_DebugMallocStats(FILE *out) |
||||
{ |
||||
uint i; |
||||
const uint numclasses = SMALL_REQUEST_THRESHOLD >> ALIGNMENT_SHIFT; |
||||
@@ -1764,7 +1777,7 @@ _PyObject_DebugMallocStats(void) |
||||
size_t total; |
||||
char buf[128]; |
||||
|
||||
- fprintf(stderr, "Small block threshold = %d, in %u size classes.\n", |
||||
+ fprintf(out, "Small block threshold = %d, in %u size classes.\n", |
||||
SMALL_REQUEST_THRESHOLD, numclasses); |
||||
|
||||
for (i = 0; i < numclasses; ++i) |
||||
@@ -1818,10 +1831,10 @@ _PyObject_DebugMallocStats(void) |
||||
} |
||||
assert(narenas == narenas_currently_allocated); |
||||
|
||||
- fputc('\n', stderr); |
||||
+ fputc('\n', out); |
||||
fputs("class size num pools blocks in use avail blocks\n" |
||||
"----- ---- --------- ------------- ------------\n", |
||||
- stderr); |
||||
+ out); |
||||
|
||||
for (i = 0; i < numclasses; ++i) { |
||||
size_t p = numpools[i]; |
||||
@@ -1832,7 +1845,7 @@ _PyObject_DebugMallocStats(void) |
||||
assert(b == 0 && f == 0); |
||||
continue; |
||||
} |
||||
- fprintf(stderr, "%5u %6u " |
||||
+ fprintf(out, "%5u %6u " |
||||
"%11" PY_FORMAT_SIZE_T "u " |
||||
"%15" PY_FORMAT_SIZE_T "u " |
||||
"%13" PY_FORMAT_SIZE_T "u\n", |
||||
@@ -1842,36 +1855,35 @@ _PyObject_DebugMallocStats(void) |
||||
pool_header_bytes += p * POOL_OVERHEAD; |
||||
quantization += p * ((POOL_SIZE - POOL_OVERHEAD) % size); |
||||
} |
||||
- fputc('\n', stderr); |
||||
- (void)printone("# times object malloc called", serialno); |
||||
- |
||||
- (void)printone("# arenas allocated total", ntimes_arena_allocated); |
||||
- (void)printone("# arenas reclaimed", ntimes_arena_allocated - narenas); |
||||
- (void)printone("# arenas highwater mark", narenas_highwater); |
||||
- (void)printone("# arenas allocated current", narenas); |
||||
+ fputc('\n', out); |
||||
+#ifdef PYMALLOC_DEBUG |
||||
+ (void)printone(out, "# times object malloc called", serialno); |
||||
+#endif |
||||
+ (void)printone(out, "# arenas allocated total", ntimes_arena_allocated); |
||||
+ (void)printone(out, "# arenas reclaimed", ntimes_arena_allocated - narenas); |
||||
+ (void)printone(out, "# arenas highwater mark", narenas_highwater); |
||||
+ (void)printone(out, "# arenas allocated current", narenas); |
||||
|
||||
PyOS_snprintf(buf, sizeof(buf), |
||||
"%" PY_FORMAT_SIZE_T "u arenas * %d bytes/arena", |
||||
narenas, ARENA_SIZE); |
||||
- (void)printone(buf, narenas * ARENA_SIZE); |
||||
+ (void)printone(out, buf, narenas * ARENA_SIZE); |
||||
|
||||
- fputc('\n', stderr); |
||||
+ fputc('\n', out); |
||||
|
||||
- total = printone("# bytes in allocated blocks", allocated_bytes); |
||||
- total += printone("# bytes in available blocks", available_bytes); |
||||
+ total = printone(out, "# bytes in allocated blocks", allocated_bytes); |
||||
+ total += printone(out, "# bytes in available blocks", available_bytes); |
||||
|
||||
PyOS_snprintf(buf, sizeof(buf), |
||||
"%u unused pools * %d bytes", numfreepools, POOL_SIZE); |
||||
- total += printone(buf, (size_t)numfreepools * POOL_SIZE); |
||||
+ total += printone(out, buf, (size_t)numfreepools * POOL_SIZE); |
||||
|
||||
- total += printone("# bytes lost to pool headers", pool_header_bytes); |
||||
- total += printone("# bytes lost to quantization", quantization); |
||||
- total += printone("# bytes lost to arena alignment", arena_alignment); |
||||
- (void)printone("Total", total); |
||||
+ total += printone(out, "# bytes lost to pool headers", pool_header_bytes); |
||||
+ total += printone(out, "# bytes lost to quantization", quantization); |
||||
+ total += printone(out, "# bytes lost to arena alignment", arena_alignment); |
||||
+ (void)printone(out, "Total", total); |
||||
} |
||||
|
||||
-#endif /* PYMALLOC_DEBUG */ |
||||
- |
||||
#ifdef Py_USING_MEMORY_DEBUGGER |
||||
/* Make this function last so gcc won't inline it since the definition is |
||||
* after the reference. |
||||
diff --git a/Objects/setobject.c b/Objects/setobject.c |
||||
index af1ce16..3439b7c 100644 |
||||
--- a/Objects/setobject.c |
||||
+++ b/Objects/setobject.c |
||||
@@ -1088,6 +1088,16 @@ PySet_Fini(void) |
||||
Py_CLEAR(emptyfrozenset); |
||||
} |
||||
|
||||
+/* Print summary info about the state of the optimized allocator */ |
||||
+void |
||||
+_PySet_DebugMallocStats(FILE *out) |
||||
+{ |
||||
+ _PyDebugAllocatorStats(out, |
||||
+ "free PySetObject", |
||||
+ numfree, sizeof(PySetObject)); |
||||
+} |
||||
+ |
||||
+ |
||||
static PyObject * |
||||
set_new(PyTypeObject *type, PyObject *args, PyObject *kwds) |
||||
{ |
||||
diff --git a/Objects/stringobject.c b/Objects/stringobject.c |
||||
index 1209197..b8646dd 100644 |
||||
--- a/Objects/stringobject.c |
||||
+++ b/Objects/stringobject.c |
||||
@@ -4843,3 +4843,43 @@ void _Py_ReleaseInternedStrings(void) |
||||
PyDict_Clear(interned); |
||||
Py_CLEAR(interned); |
||||
} |
||||
+ |
||||
+void _PyString_DebugMallocStats(FILE *out) |
||||
+{ |
||||
+ ssize_t i; |
||||
+ int num_immortal = 0, num_mortal = 0; |
||||
+ ssize_t immortal_size = 0, mortal_size = 0; |
||||
+ |
||||
+ if (interned == NULL || !PyDict_Check(interned)) |
||||
+ return; |
||||
+ |
||||
+ for (i = 0; i <= ((PyDictObject*)interned)->ma_mask; i++) { |
||||
+ PyDictEntry *ep = ((PyDictObject*)interned)->ma_table + i; |
||||
+ PyObject *pvalue = ep->me_value; |
||||
+ if (pvalue != NULL) { |
||||
+ PyStringObject *s = (PyStringObject *)ep->me_key; |
||||
+ |
||||
+ switch (s->ob_sstate) { |
||||
+ case SSTATE_NOT_INTERNED: |
||||
+ /* XXX Shouldn't happen */ |
||||
+ break; |
||||
+ case SSTATE_INTERNED_IMMORTAL: |
||||
+ num_immortal ++; |
||||
+ immortal_size += s->ob_size; |
||||
+ break; |
||||
+ case SSTATE_INTERNED_MORTAL: |
||||
+ num_mortal ++; |
||||
+ mortal_size += s->ob_size; |
||||
+ break; |
||||
+ default: |
||||
+ Py_FatalError("Inconsistent interned string state."); |
||||
+ } |
||||
+ } |
||||
+ } |
||||
+ |
||||
+ fprintf(out, "%d mortal interned strings\n", num_mortal); |
||||
+ fprintf(out, "%d immortal interned strings\n", num_immortal); |
||||
+ fprintf(out, "total size of all interned strings: " |
||||
+ "%zi/%zi " |
||||
+ "mortal/immortal\n", mortal_size, immortal_size); |
||||
+} |
||||
diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c |
||||
index 00f2e47..7682d81 100644 |
||||
--- a/Objects/tupleobject.c |
||||
+++ b/Objects/tupleobject.c |
||||
@@ -44,6 +44,22 @@ show_track(void) |
||||
} |
||||
#endif |
||||
|
||||
+/* Print summary info about the state of the optimized allocator */ |
||||
+void |
||||
+_PyTuple_DebugMallocStats(FILE *out) |
||||
+{ |
||||
+#if PyTuple_MAXSAVESIZE > 0 |
||||
+ int i; |
||||
+ char buf[128]; |
||||
+ for (i = 1; i < PyTuple_MAXSAVESIZE; i++) { |
||||
+ PyOS_snprintf(buf, sizeof(buf), |
||||
+ "free %d-sized PyTupleObject", i); |
||||
+ _PyDebugAllocatorStats(out, |
||||
+ buf, |
||||
+ numfree[i], _PyObject_VAR_SIZE(&PyTuple_Type, i)); |
||||
+ } |
||||
+#endif |
||||
+} |
||||
|
||||
PyObject * |
||||
PyTuple_New(register Py_ssize_t size) |
||||
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c |
||||
index 6bea370..ced9acf 100644 |
||||
--- a/Objects/unicodeobject.c |
||||
+++ b/Objects/unicodeobject.c |
||||
@@ -8920,6 +8920,12 @@ _PyUnicode_Fini(void) |
||||
(void)PyUnicode_ClearFreeList(); |
||||
} |
||||
|
||||
+void _PyUnicode_DebugMallocStats(FILE *out) |
||||
+{ |
||||
+ _PyDebugAllocatorStats(out, "free PyUnicodeObject", numfree, |
||||
+ sizeof(PyUnicodeObject)); |
||||
+} |
||||
+ |
||||
#ifdef __cplusplus |
||||
} |
||||
#endif |
||||
diff --git a/Python/pythonrun.c b/Python/pythonrun.c |
||||
index f0fbd74..0b73f3a 100644 |
||||
--- a/Python/pythonrun.c |
||||
+++ b/Python/pythonrun.c |
||||
@@ -557,7 +557,7 @@ Py_Finalize(void) |
||||
#endif /* Py_TRACE_REFS */ |
||||
#ifdef PYMALLOC_DEBUG |
||||
if (Py_GETENV("PYTHONMALLOCSTATS")) |
||||
- _PyObject_DebugMallocStats(); |
||||
+ _PyObject_DebugMallocStats(stderr); |
||||
#endif |
||||
|
||||
call_ll_exitfuncs(); |
||||
diff --git a/Python/sysmodule.c b/Python/sysmodule.c |
||||
index 2a7c207..fbb637b 100644 |
||||
--- a/Python/sysmodule.c |
||||
+++ b/Python/sysmodule.c |
||||
@@ -873,6 +873,57 @@ a 11-tuple where the entries in the tuple are counts of:\n\ |
||||
extern "C" { |
||||
#endif |
||||
|
||||
+static PyObject * |
||||
+sys_debugmallocstats(PyObject *self, PyObject *args) |
||||
+{ |
||||
+ PyObject *file = NULL; |
||||
+ FILE *fp; |
||||
+ |
||||
+ if (!PyArg_ParseTuple(args, "|O!", |
||||
+ &PyFile_Type, &file)) { |
||||
+ return NULL; |
||||
+ } |
||||
+ if (!file) { |
||||
+ /* Default to sys.stderr: */ |
||||
+ file = PySys_GetObject("stderr"); |
||||
+ if (!file) { |
||||
+ PyErr_SetString(PyExc_ValueError, "sys.stderr not set"); |
||||
+ return NULL; |
||||
+ } |
||||
+ if (!PyFile_Check(file)) { |
||||
+ PyErr_SetString(PyExc_TypeError, "sys.stderr is not a file"); |
||||
+ return NULL; |
||||
+ } |
||||
+ } |
||||
+ |
||||
+ Py_INCREF(file); |
||||
+ /* OK, we now own a ref on non-NULL "file" */ |
||||
+ |
||||
+ fp = PyFile_AsFile(file); |
||||
+ if (!fp) { |
||||
+ PyErr_SetString(PyExc_ValueError, "file is closed"); |
||||
+ Py_DECREF(file); |
||||
+ return NULL; |
||||
+ } |
||||
+ |
||||
+ _PyObject_DebugMallocStats(fp); |
||||
+ fputc('\n', fp); |
||||
+ _PyObject_DebugTypeStats(fp); |
||||
+ |
||||
+ Py_DECREF(file); |
||||
+ |
||||
+ Py_RETURN_NONE; |
||||
+} |
||||
+PyDoc_STRVAR(debugmallocstats_doc, |
||||
+"_debugmallocstats([file])\n\ |
||||
+\n\ |
||||
+Print summary info to the given file (or sys.stderr) about the state of\n\ |
||||
+pymalloc's structures.\n\ |
||||
+\n\ |
||||
+In Py_DEBUG mode, also perform some expensive internal consistency\n\ |
||||
+checks.\n\ |
||||
+"); |
||||
+ |
||||
#ifdef Py_TRACE_REFS |
||||
/* Defined in objects.c because it uses static globals if that file */ |
||||
extern PyObject *_Py_GetObjects(PyObject *, PyObject *); |
||||
@@ -971,6 +1022,8 @@ static PyMethodDef sys_methods[] = { |
||||
{"settrace", sys_settrace, METH_O, settrace_doc}, |
||||
{"gettrace", sys_gettrace, METH_NOARGS, gettrace_doc}, |
||||
{"call_tracing", sys_call_tracing, METH_VARARGS, call_tracing_doc}, |
||||
+ {"_debugmallocstats", sys_debugmallocstats, METH_VARARGS, |
||||
+ debugmallocstats_doc}, |
||||
{NULL, NULL} /* sentinel */ |
||||
}; |
||||
|
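
A minimal usage sketch of the sys._debugmallocstats() entry point added by this patch, assuming an interpreter built with it; the filename below is illustrative:

    import sys

    # With no argument the report is written to sys.stderr:
    sys._debugmallocstats()

    # A real file object (not merely file-like) can be passed instead:
    with open('mallocstats.txt', 'w') as out:      # illustrative filename
        sys._debugmallocstats(out)

    with open('mallocstats.txt') as f:
        report = f.read()
    # The report includes lines such as "# arenas allocated current" plus the
    # per-type freelist summaries produced by _PyObject_DebugTypeStats().
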
@ -0,0 +1,31 @@
@@ -0,0 +1,31 @@
|
||||
--- Lib/test/test_gdb.py.old 2012-04-11 21:04:01.367073855 -0400 |
||||
+++ Lib/test/test_gdb.py 2012-04-12 08:52:58.320288761 -0400 |
||||
@@ -96,6 +96,15 @@ class DebuggerTests(unittest.TestCase): |
||||
# Generate a list of commands in gdb's language: |
||||
commands = ['set breakpoint pending yes', |
||||
'break %s' % breakpoint, |
||||
+ |
||||
+ # GDB as of Fedora 17 onwards can distinguish between the |
||||
+ # value of a variable at entry vs current value: |
||||
+ # http://sourceware.org/gdb/onlinedocs/gdb/Variables.html |
||||
+ # which leads to the selftests failing with errors like this: |
||||
+ # AssertionError: 'v@entry=()' != '()' |
||||
+ # Disable this: |
||||
+ 'set print entry-values no', |
||||
+ |
||||
'run'] |
||||
if cmds_after_breakpoint: |
||||
commands += cmds_after_breakpoint |
||||
--- Lib/test/test_gdb.py.old 2012-04-11 21:04:01.367073855 -0400 |
||||
+++ Lib/test/test_gdb.py 2012-04-12 08:52:58.320288761 -0400 |
||||
@@ -144,6 +153,10 @@ |
||||
'Do you need "set solib-search-path" or ' |
||||
'"set sysroot"?', |
||||
) |
||||
+ ignore_patterns += ('warning: Unable to open', |
||||
+ 'Missing separate debuginfo for', |
||||
+ 'Try: yum --disablerepo=', |
||||
+ 'Undefined set print command') |
||||
for line in errlines: |
||||
if not line.startswith(ignore_patterns): |
||||
unexpected_errlines.append(line) |
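
For reference, these command lists are handed to gdb one per --eval-command argument, much as the surrounding test harness does; a condensed sketch (the breakpoint and inferior command are illustrative):

    import subprocess
    import sys

    commands = ['set breakpoint pending yes',
                'break builtin_id',             # illustrative breakpoint
                'set print entry-values no',    # avoid "v@entry=..." output on newer gdb
                'run',
                'backtrace']

    args = ['gdb', '--batch']
    args += ['--eval-command=%s' % cmd for cmd in commands]
    args += ['--args', sys.executable, '-c', 'id(42)']

    p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = p.communicate()
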
@ -0,0 +1,52 @@
@@ -0,0 +1,52 @@
|
||||
diff -up Python-2.7.3/Lib/test/test_gdb.py.gdb-autoload-safepath Python-2.7.3/Lib/test/test_gdb.py |
||||
--- Python-2.7.3/Lib/test/test_gdb.py.gdb-autoload-safepath 2012-04-30 15:53:57.254045220 -0400 |
||||
+++ Python-2.7.3/Lib/test/test_gdb.py 2012-04-30 16:19:19.569941124 -0400 |
||||
@@ -54,6 +54,19 @@ def gdb_has_frame_select(): |
||||
|
||||
HAS_PYUP_PYDOWN = gdb_has_frame_select() |
||||
|
||||
+def gdb_has_autoload_safepath(): |
||||
+ # Recent GDBs will only auto-load scripts from certain safe |
||||
+ # locations, so we will need to turn off this protection. |
||||
+ # However, if the GDB doesn't have it, then the following |
||||
+ # command will generate noise on stderr (rhbz#817072): |
||||
+ cmd = "--eval-command=set auto-load safe-path /" |
||||
+ p = subprocess.Popen(["gdb", "--batch", cmd], |
||||
+ stderr=subprocess.PIPE) |
||||
+ _, stderr = p.communicate() |
||||
+ return '"on" or "off" expected.' not in stderr |
||||
+ |
||||
+HAS_AUTOLOAD_SAFEPATH = gdb_has_autoload_safepath() |
||||
+ |
||||
class DebuggerTests(unittest.TestCase): |
||||
|
||||
"""Test that the debugger can debug Python.""" |
||||
@@ -112,15 +125,28 @@ class DebuggerTests(unittest.TestCase): |
||||
'set print entry-values no', |
||||
|
||||
'run'] |
||||
+ |
||||
+ if HAS_AUTOLOAD_SAFEPATH: |
||||
+ # Recent GDBs will only auto-load scripts from certain safe |
||||
+ # locations. |
||||
+ # Where necessary, turn off this protection to ensure that |
||||
+ # our -gdb.py script can be loaded - but not on earlier gdb builds |
||||
+ # as this would generate noise on stderr (rhbz#817072): |
||||
+ init_commands = ['set auto-load safe-path /'] |
||||
+ else: |
||||
+ init_commands = [] |
||||
+ |
||||
if cmds_after_breakpoint: |
||||
commands += cmds_after_breakpoint |
||||
else: |
||||
commands += ['backtrace'] |
||||
|
||||
+ # print init_commands |
||||
# print commands |
||||
|
||||
# Use "commands" to generate the arguments with which to invoke "gdb": |
||||
args = ["gdb", "--batch"] |
||||
+ args += ['--init-eval-command=%s' % cmd for cmd in init_commands] |
||||
args += ['--eval-command=%s' % cmd for cmd in commands] |
||||
args += ["--args", |
||||
sys.executable] |
@ -0,0 +1,285 @@
@@ -0,0 +1,285 @@
|
||||
diff -up Python-2.7.3/Doc/library/crypt.rst.crypt-module-salt-backport Python-2.7.3/Doc/library/crypt.rst |
||||
--- Python-2.7.3/Doc/library/crypt.rst.crypt-module-salt-backport 2012-04-09 19:07:28.000000000 -0400 |
||||
+++ Python-2.7.3/Doc/library/crypt.rst 2013-02-19 16:44:20.465334062 -0500 |
||||
@@ -16,9 +16,9 @@ |
||||
|
||||
This module implements an interface to the :manpage:`crypt(3)` routine, which is |
||||
a one-way hash function based upon a modified DES algorithm; see the Unix man |
||||
-page for further details. Possible uses include allowing Python scripts to |
||||
-accept typed passwords from the user, or attempting to crack Unix passwords with |
||||
-a dictionary. |
||||
+page for further details. Possible uses include storing hashed passwords |
||||
+so you can check passwords without storing the actual password, or attempting |
||||
+to crack Unix passwords with a dictionary. |
||||
|
||||
.. index:: single: crypt(3) |
||||
|
||||
@@ -27,15 +27,81 @@ the :manpage:`crypt(3)` routine in the r |
||||
extensions available on the current implementation will also be available on |
||||
this module. |
||||
|
||||
+Hashing Methods |
||||
+--------------- |
||||
|
||||
-.. function:: crypt(word, salt) |
||||
+The :mod:`crypt` module defines the list of hashing methods (not all methods |
||||
+are available on all platforms): |
||||
+ |
||||
+.. data:: METHOD_SHA512 |
||||
+ |
||||
+ A Modular Crypt Format method with 16 character salt and 86 character |
||||
+ hash. This is the strongest method. |
||||
+ |
||||
+.. versionadded:: 3.3 |
||||
+ |
||||
+.. data:: METHOD_SHA256 |
||||
+ |
||||
+ Another Modular Crypt Format method with 16 character salt and 43 |
||||
+ character hash. |
||||
+ |
||||
+.. versionadded:: 3.3 |
||||
+ |
||||
+.. data:: METHOD_MD5 |
||||
+ |
||||
+ Another Modular Crypt Format method with 8 character salt and 22 |
||||
+ character hash. |
||||
+ |
||||
+.. versionadded:: 3.3 |
||||
+ |
||||
+.. data:: METHOD_CRYPT |
||||
+ |
||||
+ The traditional method with a 2 character salt and 13 characters of |
||||
+ hash. This is the weakest method. |
||||
+ |
||||
+.. versionadded:: 3.3 |
||||
+ |
||||
+ |
||||
+Module Attributes |
||||
+----------------- |
||||
+ |
||||
+ |
||||
+.. attribute:: methods |
||||
+ |
||||
+ A list of available password hashing algorithms, as |
||||
+ ``crypt.METHOD_*`` objects. This list is sorted from strongest to |
||||
+ weakest, and is guaranteed to have at least ``crypt.METHOD_CRYPT``. |
||||
+ |
||||
+.. versionadded:: 3.3 |
||||
+ |
||||
+ |
||||
+Module Functions |
||||
+---------------- |
||||
+ |
||||
+The :mod:`crypt` module defines the following functions: |
||||
+ |
||||
+.. function:: crypt(word, salt=None) |
||||
|
||||
*word* will usually be a user's password as typed at a prompt or in a graphical |
||||
- interface. *salt* is usually a random two-character string which will be used |
||||
- to perturb the DES algorithm in one of 4096 ways. The characters in *salt* must |
||||
- be in the set ``[./a-zA-Z0-9]``. Returns the hashed password as a string, which |
||||
- will be composed of characters from the same alphabet as the salt (the first two |
||||
- characters represent the salt itself). |
||||
+ interface. The optional *salt* is either a string as returned from |
||||
+ :func:`mksalt`, one of the ``crypt.METHOD_*`` values (though not all |
||||
+ may be available on all platforms), or a full encrypted password |
||||
+ including salt, as returned by this function. If *salt* is not |
||||
+ provided, the strongest method will be used (as returned by |
||||
+ :func:`methods`). |
||||
+ |
||||
+ Checking a password is usually done by passing the plain-text password |
||||
+ as *word* and the full results of a previous :func:`crypt` call, |
||||
+ which should be the same as the results of this call. |
||||
+ |
||||
+ *salt* (either a random 2 or 16 character string, possibly prefixed with |
||||
+ ``$digit$`` to indicate the method) which will be used to perturb the |
||||
+ encryption algorithm. The characters in *salt* must be in the set |
||||
+ ``[./a-zA-Z0-9]``, with the exception of Modular Crypt Format which |
||||
+ prefixes a ``$digit$``. |
||||
+ |
||||
+ Returns the hashed password as a string, which will be composed of |
||||
+ characters from the same alphabet as the salt. |
||||
|
||||
.. index:: single: crypt(3) |
||||
|
||||
@@ -43,6 +109,27 @@ this module. |
||||
different sizes in the *salt*, it is recommended to use the full crypted |
||||
password as salt when checking for a password. |
||||
|
||||
+.. versionchanged:: 3.3 |
||||
+ Before version 3.3, *salt* must be specified as a string and cannot |
||||
+ accept ``crypt.METHOD_*`` values (which don't exist anyway). |
||||
+ |
||||
+ |
||||
+.. function:: mksalt(method=None) |
||||
+ |
||||
+ Return a randomly generated salt of the specified method. If no |
||||
+ *method* is given, the strongest method available as returned by |
||||
+ :func:`methods` is used. |
||||
+ |
||||
+ The return value is a string either of 2 characters in length for |
||||
+ ``crypt.METHOD_CRYPT``, or 19 characters starting with ``$digit$`` and |
||||
+ 16 random characters from the set ``[./a-zA-Z0-9]``, suitable for |
||||
+ passing as the *salt* argument to :func:`crypt`. |
||||
+ |
||||
+.. versionadded:: 3.3 |
||||
+ |
||||
+Examples |
||||
+-------- |
||||
+ |
||||
A simple example illustrating typical use:: |
||||
|
||||
import crypt, getpass, pwd |
||||
@@ -59,3 +146,11 @@ A simple example illustrating typical us |
||||
else: |
||||
return 1 |
||||
|
||||
+To generate a hash of a password using the strongest available method and |
||||
+check it against the original:: |
||||
+ |
||||
+ import crypt |
||||
+ |
||||
+ hashed = crypt.crypt(plaintext) |
||||
+ if hashed != crypt.crypt(plaintext, hashed): |
||||
+ raise "Hashed version doesn't validate against original" |
||||
diff -up Python-2.7.3/Lib/crypt.py.crypt-module-salt-backport Python-2.7.3/Lib/crypt.py |
||||
--- Python-2.7.3/Lib/crypt.py.crypt-module-salt-backport 2013-02-19 16:44:20.465334062 -0500 |
||||
+++ Python-2.7.3/Lib/crypt.py 2013-02-19 16:49:56.425311089 -0500 |
||||
@@ -0,0 +1,71 @@ |
||||
+"""Wrapper to the POSIX crypt library call and associated functionality. |
||||
+ |
||||
+Note that the ``methods`` and ``METHOD_*`` attributes are non-standard |
||||
+extensions to Python 2.7, backported from 3.3""" |
||||
+ |
||||
+import _crypt |
||||
+import string as _string |
||||
+from random import SystemRandom as _SystemRandom |
||||
+from collections import namedtuple as _namedtuple |
||||
+ |
||||
+ |
||||
+_saltchars = _string.ascii_letters + _string.digits + './' |
||||
+_sr = _SystemRandom() |
||||
+ |
||||
+ |
||||
+class _Method(_namedtuple('_Method', 'name ident salt_chars total_size')): |
||||
+ |
||||
+ """Class representing a salt method per the Modular Crypt Format or the |
||||
+ legacy 2-character crypt method.""" |
||||
+ |
||||
+ def __repr__(self): |
||||
+ return '<crypt.METHOD_%s>' % self.name |
||||
+ |
||||
+ |
||||
+def mksalt(method=None): |
||||
+ """Generate a salt for the specified method. |
||||
+ |
||||
+ If not specified, the strongest available method will be used. |
||||
+ |
||||
+ This is a non-standard extension to Python 2.7, backported from 3.3 |
||||
+ """ |
||||
+ if method is None: |
||||
+ method = methods[0] |
||||
+ s = '$%s$' % method.ident if method.ident else '' |
||||
+ s += ''.join(_sr.sample(_saltchars, method.salt_chars)) |
||||
+ return s |
||||
+ |
||||
+ |
||||
+def crypt(word, salt=None): |
||||
+ """Return a string representing the one-way hash of a password, with a salt |
||||
+ prepended. |
||||
+ |
||||
+ If ``salt`` is not specified or is ``None``, the strongest |
||||
+ available method will be selected and a salt generated. Otherwise, |
||||
+ ``salt`` may be one of the ``crypt.METHOD_*`` values, or a string as |
||||
+ returned by ``crypt.mksalt()``. |
||||
+ |
||||
+ Note that these are non-standard extensions to Python 2.7's crypt.crypt() |
||||
+ entrypoint, backported from 3.3: the standard Python 2.7 crypt.crypt() |
||||
+ entrypoint requires two strings as the parameters, and does not support |
||||
+ keyword arguments. |
||||
+ """ |
||||
+ if salt is None or isinstance(salt, _Method): |
||||
+ salt = mksalt(salt) |
||||
+ return _crypt.crypt(word, salt) |
||||
+ |
||||
+ |
||||
+# available salting/crypto methods |
||||
+METHOD_CRYPT = _Method('CRYPT', None, 2, 13) |
||||
+METHOD_MD5 = _Method('MD5', '1', 8, 34) |
||||
+METHOD_SHA256 = _Method('SHA256', '5', 16, 63) |
||||
+METHOD_SHA512 = _Method('SHA512', '6', 16, 106) |
||||
+ |
||||
+methods = [] |
||||
+for _method in (METHOD_SHA512, METHOD_SHA256, METHOD_MD5): |
||||
+ _result = crypt('', _method) |
||||
+ if _result and len(_result) == _method.total_size: |
||||
+ methods.append(_method) |
||||
+methods.append(METHOD_CRYPT) |
||||
+del _result, _method |
||||
+ |
||||
diff -up Python-2.7.3/Lib/test/test_crypt.py.crypt-module-salt-backport Python-2.7.3/Lib/test/test_crypt.py |
||||
--- Python-2.7.3/Lib/test/test_crypt.py.crypt-module-salt-backport 2012-04-09 19:07:31.000000000 -0400 |
||||
+++ Python-2.7.3/Lib/test/test_crypt.py 2013-02-19 16:44:20.465334062 -0500 |
||||
@@ -10,6 +10,25 @@ class CryptTestCase(unittest.TestCase): |
||||
if test_support.verbose: |
||||
print 'Test encryption: ', c |
||||
|
||||
+ def test_salt(self): |
||||
+ self.assertEqual(len(crypt._saltchars), 64) |
||||
+ for method in crypt.methods: |
||||
+ salt = crypt.mksalt(method) |
||||
+ self.assertEqual(len(salt), |
||||
+ method.salt_chars + (3 if method.ident else 0)) |
||||
+ |
||||
+ def test_saltedcrypt(self): |
||||
+ for method in crypt.methods: |
||||
+ pw = crypt.crypt('assword', method) |
||||
+ self.assertEqual(len(pw), method.total_size) |
||||
+ pw = crypt.crypt('assword', crypt.mksalt(method)) |
||||
+ self.assertEqual(len(pw), method.total_size) |
||||
+ |
||||
+ def test_methods(self): |
||||
+ # Guarantee that METHOD_CRYPT is the last method in crypt.methods. |
||||
+ self.assertTrue(len(crypt.methods) >= 1) |
||||
+ self.assertEqual(crypt.METHOD_CRYPT, crypt.methods[-1]) |
||||
+ |
||||
def test_main(): |
||||
test_support.run_unittest(CryptTestCase) |
||||
|
||||
diff -up Python-2.7.3/Modules/cryptmodule.c.crypt-module-salt-backport Python-2.7.3/Modules/cryptmodule.c |
||||
--- Python-2.7.3/Modules/cryptmodule.c.crypt-module-salt-backport 2012-04-09 19:07:34.000000000 -0400 |
||||
+++ Python-2.7.3/Modules/cryptmodule.c 2013-02-19 16:44:20.466334063 -0500 |
||||
@@ -43,7 +43,7 @@ static PyMethodDef crypt_methods[] = { |
||||
}; |
||||
|
||||
PyMODINIT_FUNC |
||||
-initcrypt(void) |
||||
+init_crypt(void) |
||||
{ |
||||
- Py_InitModule("crypt", crypt_methods); |
||||
+ Py_InitModule("_crypt", crypt_methods); |
||||
} |
||||
diff -up Python-2.7.3/Modules/Setup.dist.crypt-module-salt-backport Python-2.7.3/Modules/Setup.dist |
||||
--- Python-2.7.3/Modules/Setup.dist.crypt-module-salt-backport 2013-02-19 16:44:20.463334063 -0500 |
||||
+++ Python-2.7.3/Modules/Setup.dist 2013-02-19 16:44:20.466334063 -0500 |
||||
@@ -221,7 +221,7 @@ _ssl _ssl.c \ |
||||
# |
||||
# First, look at Setup.config; configure may have set this for you. |
||||
|
||||
-crypt cryptmodule.c # -lcrypt # crypt(3); needs -lcrypt on some systems |
||||
+_crypt _cryptmodule.c -lcrypt # crypt(3); needs -lcrypt on some systems |
||||
|
||||
|
||||
# Some more UNIX dependent modules -- off by default, since these |
||||
diff -up Python-2.7.3/setup.py.crypt-module-salt-backport Python-2.7.3/setup.py |
||||
--- Python-2.7.3/setup.py.crypt-module-salt-backport 2013-02-19 16:44:20.425334067 -0500 |
||||
+++ Python-2.7.3/setup.py 2013-02-19 16:44:20.466334063 -0500 |
||||
@@ -693,7 +693,7 @@ class PyBuildExt(build_ext): |
||||
libs = ['crypt'] |
||||
else: |
||||
libs = [] |
||||
- exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) ) |
||||
+ exts.append( Extension('_crypt', ['_cryptmodule.c'], libraries=libs) ) |
||||
|
||||
# CSV files |
||||
exts.append( Extension('_csv', ['_csv.c']) ) |
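
A minimal usage sketch of the backported interface, assuming the patched crypt module is installed; the password literal is illustrative:

    import crypt

    method = crypt.methods[0]                  # strongest available, e.g. METHOD_SHA512
    salt = crypt.mksalt(method)

    hashed = crypt.crypt('my secret', salt)    # omitting salt also picks the strongest method

    # Verify by re-hashing with the full previous result as the salt:
    if crypt.crypt('my secret', hashed) == hashed:
        print 'password verified'
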
@ -0,0 +1,125 @@
@@ -0,0 +1,125 @@
|
||||
diff -up Python-2.7.3/Tools/gdb/libpython.py.fix-fake-repr-in-gdb-hooks Python-2.7.3/Tools/gdb/libpython.py |
||||
--- Python-2.7.3/Tools/gdb/libpython.py.fix-fake-repr-in-gdb-hooks 2013-02-19 17:21:33.541181366 -0500 |
||||
+++ Python-2.7.3/Tools/gdb/libpython.py 2013-02-19 17:21:42.090180782 -0500 |
||||
@@ -105,6 +105,24 @@ class TruncatedStringIO(object): |
||||
def getvalue(self): |
||||
return self._val |
||||
|
||||
+class FakeProxy(object): |
||||
+ """ |
||||
+ Class representing a non-descript PyObject* value in the inferior |
||||
+ process for when we don't have a custom scraper, intended to have |
||||
+ a sane repr(). |
||||
+ """ |
||||
+ def __init__(self, tp_name, address): |
||||
+ self.tp_name = tp_name |
||||
+ self.address = address |
||||
+ |
||||
+ def __repr__(self): |
||||
+ # For the NULL pointer, we have no way of knowing a type, so |
||||
+ # special-case it as per |
||||
+ # http://bugs.python.org/issue8032#msg100882 |
||||
+ if self.address == 0: |
||||
+ return '0x0' |
||||
+ return '<%s at remote 0x%x>' % (self.tp_name, self.address) |
||||
+ |
||||
class PyObjectPtr(object): |
||||
""" |
||||
Class wrapping a gdb.Value that's a either a (PyObject*) within the |
||||
@@ -232,28 +250,8 @@ class PyObjectPtr(object): |
||||
visiting object graphs with loops). Analogous to Py_ReprEnter and |
||||
Py_ReprLeave |
||||
''' |
||||
- |
||||
- class FakeRepr(object): |
||||
- """ |
||||
- Class representing a non-descript PyObject* value in the inferior |
||||
- process for when we don't have a custom scraper, intended to have |
||||
- a sane repr(). |
||||
- """ |
||||
- |
||||
- def __init__(self, tp_name, address): |
||||
- self.tp_name = tp_name |
||||
- self.address = address |
||||
- |
||||
- def __repr__(self): |
||||
- # For the NULL pointer, we have no way of knowing a type, so |
||||
- # special-case it as per |
||||
- # http://bugs.python.org/issue8032#msg100882 |
||||
- if self.address == 0: |
||||
- return '0x0' |
||||
- return '<%s at remote 0x%x>' % (self.tp_name, self.address) |
||||
- |
||||
- return FakeRepr(self.safe_tp_name(), |
||||
- long(self._gdbval)) |
||||
+ return FakeProxy(self.safe_tp_name(), |
||||
+ long(self._gdbval)) |
||||
|
||||
def write_repr(self, out, visited): |
||||
''' |
||||
@@ -384,7 +382,7 @@ def _write_instance_repr(out, visited, n |
||||
if not first: |
||||
out.write(', ') |
||||
first = False |
||||
- out.write(pyop_arg.proxyval(visited)) |
||||
+ out.write(str(pyop_arg.proxyval(visited))) |
||||
out.write('=') |
||||
pyop_val.write_repr(out, visited) |
||||
out.write(')') |
||||
@@ -785,6 +783,8 @@ class PyNoneStructPtr(PyObjectPtr): |
||||
def proxyval(self, visited): |
||||
return None |
||||
|
||||
+class CantReadFilename(ValueError): |
||||
+ pass |
||||
|
||||
class PyFrameObjectPtr(PyObjectPtr): |
||||
_typename = 'PyFrameObject' |
||||
@@ -861,7 +861,10 @@ class PyFrameObjectPtr(PyObjectPtr): |
||||
'''Get the path of the current Python source file, as a string''' |
||||
if self.is_optimized_out(): |
||||
return '(frame information optimized out)' |
||||
- return self.co_filename.proxyval(set()) |
||||
+ value = self.co_filename.proxyval(set()) |
||||
+ if isinstance(value, FakeProxy): |
||||
+ raise CantReadFilename('unable to extract filename') |
||||
+ return value |
||||
|
||||
def current_line_num(self): |
||||
'''Get current line number as an integer (1-based) |
||||
@@ -907,7 +910,7 @@ class PyFrameObjectPtr(PyObjectPtr): |
||||
out.write(', ') |
||||
first = False |
||||
|
||||
- out.write(pyop_name.proxyval(visited)) |
||||
+ out.write(str(pyop_name.proxyval(visited))) |
||||
out.write('=') |
||||
pyop_value.write_repr(out, visited) |
||||
|
||||
@@ -1252,8 +1255,11 @@ class Frame(object): |
||||
if pyop: |
||||
sys.stdout.write('#%i %s\n' % (self.get_index(), pyop.get_truncated_repr(MAX_OUTPUT_LEN))) |
||||
if not pyop.is_optimized_out(): |
||||
- line = pyop.current_line() |
||||
- sys.stdout.write(' %s\n' % line.strip()) |
||||
+ try: |
||||
+ line = pyop.current_line() |
||||
+ sys.stdout.write(' %s\n' % line.strip()) |
||||
+ except CantReadFilename: |
||||
+ sys.stdout.write(' %s\n' % '(unable to read filename)') |
||||
else: |
||||
sys.stdout.write('#%i (unable to read python frame information)\n' % self.get_index()) |
||||
else: |
||||
@@ -1303,7 +1309,11 @@ class PyList(gdb.Command): |
||||
print 'Unable to read information on python frame' |
||||
return |
||||
|
||||
- filename = pyop.filename() |
||||
+ try: |
||||
+ filename = pyop.filename() |
||||
+ except CantReadFilename: |
||||
+ print "Unable to extract filename from python frame" |
||||
+ return |
||||
lineno = pyop.current_line_num() |
||||
|
||||
if start is None: |
@ -0,0 +1,43 @@
@@ -0,0 +1,43 @@
|
||||
diff -up Python-2.7.3/Lib/test/test_gdb.py.disable-stack-navigation-tests-when-optimized-in-test_gdb Python-2.7.3/Lib/test/test_gdb.py |
||||
--- Python-2.7.3/Lib/test/test_gdb.py.disable-stack-navigation-tests-when-optimized-in-test_gdb 2013-02-20 12:27:05.669526425 -0500 |
||||
+++ Python-2.7.3/Lib/test/test_gdb.py 2013-02-20 12:27:05.715526422 -0500 |
||||
@@ -653,10 +653,10 @@ class PyListTests(DebuggerTests): |
||||
' 3 def foo(a, b, c):\n', |
||||
bt) |
||||
|
||||
+@unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") |
||||
+@unittest.skipIf(python_is_optimized(), |
||||
+ "Python was compiled with optimizations") |
||||
class StackNavigationTests(DebuggerTests): |
||||
- @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") |
||||
- @unittest.skipIf(python_is_optimized(), |
||||
- "Python was compiled with optimizations") |
||||
def test_pyup_command(self): |
||||
'Verify that the "py-up" command works' |
||||
bt = self.get_stack_trace(script=self.get_sample_script(), |
||||
@@ -667,7 +667,6 @@ class StackNavigationTests(DebuggerTests |
||||
baz\(a, b, c\) |
||||
$''') |
||||
|
||||
- @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") |
||||
def test_down_at_bottom(self): |
||||
'Verify handling of "py-down" at the bottom of the stack' |
||||
bt = self.get_stack_trace(script=self.get_sample_script(), |
||||
@@ -675,7 +674,6 @@ $''') |
||||
self.assertEndsWith(bt, |
||||
'Unable to find a newer python frame\n') |
||||
|
||||
- @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") |
||||
def test_up_at_top(self): |
||||
'Verify handling of "py-up" at the top of the stack' |
||||
bt = self.get_stack_trace(script=self.get_sample_script(), |
||||
@@ -683,9 +681,6 @@ $''') |
||||
self.assertEndsWith(bt, |
||||
'Unable to find an older python frame\n') |
||||
|
||||
- @unittest.skipUnless(HAS_PYUP_PYDOWN, "test requires py-up/py-down commands") |
||||
- @unittest.skipIf(python_is_optimized(), |
||||
- "Python was compiled with optimizations") |
||||
def test_up_then_down(self): |
||||
'Verify "py-up" followed by "py-down"' |
||||
bt = self.get_stack_trace(script=self.get_sample_script(), |
@ -0,0 +1,12 @@
@@ -0,0 +1,12 @@
|
||||
diff -up Python-2.6.6/Lib/distutils/sysconfig.py.distutils-cflags Python-2.6.6/Lib/distutils/sysconfig.py |
||||
--- Python-2.6.6/Lib/distutils/sysconfig.py.distutils-cflags 2011-08-12 17:18:17.833091153 -0400 |
||||
+++ Python-2.6.6/Lib/distutils/sysconfig.py 2011-08-12 17:18:27.449106938 -0400 |
||||
@@ -187,7 +187,7 @@ def customize_compiler(compiler): |
||||
if 'LDFLAGS' in os.environ: |
||||
ldshared = ldshared + ' ' + os.environ['LDFLAGS'] |
||||
if 'CFLAGS' in os.environ: |
||||
- cflags = opt + ' ' + os.environ['CFLAGS'] |
||||
+ cflags = cflags + ' ' + os.environ['CFLAGS'] |
||||
ldshared = ldshared + ' ' + os.environ['CFLAGS'] |
||||
if 'CPPFLAGS' in os.environ: |
||||
cpp = cpp + ' ' + os.environ['CPPFLAGS'] |
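
The practical effect of the one-line change is that $CFLAGS from the environment is appended to the configured CFLAGS rather than to OPT alone; a small sketch of the corrected logic with illustrative flag values:

    import os

    cflags = '-O2 -g -fwrapv'        # illustrative configured CFLAGS (BASECFLAGS + OPT)
    ldshared = 'gcc -shared'         # illustrative configured LDSHARED

    if 'CFLAGS' in os.environ:
        cflags = cflags + ' ' + os.environ['CFLAGS']
        ldshared = ldshared + ' ' + os.environ['CFLAGS']
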
@ -0,0 +1,41 @@
@@ -0,0 +1,41 @@
|
||||
diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py |
||||
--- a/Lib/multiprocessing/connection.py |
||||
+++ b/Lib/multiprocessing/connection.py |
||||
@@ -41,6 +41,10 @@ |
||||
# A very generous timeout when it comes to local connections... |
||||
CONNECTION_TIMEOUT = 20. |
||||
|
||||
+# The hmac module implicitly defaults to using MD5. |
||||
+# Support using a stronger algorithm for the challenge/response code: |
||||
+HMAC_DIGEST_NAME='sha256' |
||||
+ |
||||
_mmap_counter = itertools.count() |
||||
|
||||
default_family = 'AF_INET' |
||||
@@ -700,12 +704,16 @@ |
||||
WELCOME = b'#WELCOME#' |
||||
FAILURE = b'#FAILURE#' |
||||
|
||||
+def get_digestmod_for_hmac(): |
||||
+ import hashlib |
||||
+ return getattr(hashlib, HMAC_DIGEST_NAME) |
||||
+ |
||||
def deliver_challenge(connection, authkey): |
||||
import hmac |
||||
assert isinstance(authkey, bytes) |
||||
message = os.urandom(MESSAGE_LENGTH) |
||||
connection.send_bytes(CHALLENGE + message) |
||||
- digest = hmac.new(authkey, message).digest() |
||||
+ digest = hmac.new(authkey, message, get_digestmod_for_hmac()).digest() |
||||
response = connection.recv_bytes(256) # reject large message |
||||
if response == digest: |
||||
connection.send_bytes(WELCOME) |
||||
@@ -719,7 +727,7 @@ |
||||
message = connection.recv_bytes(256) # reject large message |
||||
assert message[:len(CHALLENGE)] == CHALLENGE, 'message = %r' % message |
||||
message = message[len(CHALLENGE):] |
||||
- digest = hmac.new(authkey, message).digest() |
||||
+ digest = hmac.new(authkey, message, get_digestmod_for_hmac()).digest() |
||||
connection.send_bytes(digest) |
||||
response = connection.recv_bytes(256) # reject large message |
||||
if response != WELCOME: |
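The comment block in this hunk states the intent: replace hmac's implicit MD5 default with a named, stronger digest for the challenge/response handshake. As a rough standalone sketch of that round trip (not part of the patch; the helper names below are made up):

import hashlib
import hmac
import os

HMAC_DIGEST_NAME = 'sha256'   # mirrors the constant the patch introduces

def _digestmod():
    # Resolve the digest constructor by name, as get_digestmod_for_hmac() does.
    return getattr(hashlib, HMAC_DIGEST_NAME)

def make_challenge(authkey):
    # Server side: random message plus the digest the client must reproduce.
    message = os.urandom(20)
    expected = hmac.new(authkey, message, _digestmod()).digest()
    return message, expected

def answer_challenge(authkey, message):
    # Client side: prove knowledge of authkey without sending it.
    return hmac.new(authkey, message, _digestmod()).digest()

# Round trip, no sockets involved:
key = b'secret-key'
msg, expected = make_challenge(key)
assert answer_challenge(key, msg) == expected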
@ -0,0 +1,13 @@
@@ -0,0 +1,13 @@
|
||||
diff -up Python-2.7.3/Lib/test/test_support.py.rhbz913732 Python-2.7.3/Lib/test/test_support.py |
||||
--- Python-2.7.3/Lib/test/test_support.py.rhbz913732 2013-03-04 16:11:53.757315921 -0500 |
||||
+++ Python-2.7.3/Lib/test/test_support.py 2013-03-04 16:12:11.331314722 -0500 |
||||
@@ -304,7 +304,8 @@ def bind_port(sock, host=HOST): |
||||
if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) == 1: |
||||
raise TestFailed("tests should never set the SO_REUSEADDR " \ |
||||
"socket option on TCP/IP sockets!") |
||||
- if hasattr(socket, 'SO_REUSEPORT'): |
||||
+ if hasattr(socket, 'SO_REUSEPORT') \ |
||||
+ and 'WITHIN_PYTHON_RPM_BUILD' not in os.environ: # rhbz#913732 |
||||
if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 1: |
||||
raise TestFailed("tests should never set the SO_REUSEPORT " \ |
||||
"socket option on TCP/IP sockets!") |
@ -0,0 +1,28 @@
@@ -0,0 +1,28 @@
|
||||
diff -up Python-2.7.3/Modules/getpath.c.fix-for-usr-move Python-2.7.3/Modules/getpath.c |
||||
--- Python-2.7.3/Modules/getpath.c.fix-for-usr-move 2013-03-06 14:25:32.801828698 -0500 |
||||
+++ Python-2.7.3/Modules/getpath.c 2013-03-06 15:59:30.872443168 -0500 |
||||
@@ -510,6 +510,24 @@ calculate_path(void) |
||||
MAXPATHLEN bytes long. |
||||
*/ |
||||
|
||||
+ /* |
||||
+ Workaround for rhbz#817554, where an empty argv0_path erroneously |
||||
+ locates "prefix" as "/lib[64]/python2.7" due to it finding |
||||
+ "/lib[64]/python2.7/os.py" via the /lib -> /usr/lib symlink for |
||||
+ https://fedoraproject.org/wiki/Features/UsrMove |
||||
+ */ |
||||
+ if (argv0_path[0] == '\0' && 0 == strcmp(prog, "cmpi_swig")) { |
||||
+ /* |
||||
+ We have an empty argv0_path, presumably because prog aka |
||||
+ Py_GetProgramName() was not found on $PATH. |
||||
+ |
||||
+ Set argv0_path to "/usr/" so that search_for_prefix() and |
||||
+ search_for_exec_prefix() don't erroneously pick up |
||||
+ on /lib/ via the UsrMove symlink: |
||||
+ */ |
||||
+ strcpy(argv0_path, "/usr/"); |
||||
+ } |
||||
+ |
||||
if (!(pfound = search_for_prefix(argv0_path, home))) { |
||||
if (!Py_FrozenFlag) |
||||
fprintf(stderr, |
@ -1,13 +1,12 @@
@@ -1,13 +1,12 @@
|
||||
diff --git a/config.sub b/config.sub |
||||
index 40ea5df..932128b 100755 |
||||
--- a/config.sub |
||||
+++ b/config.sub |
||||
@@ -1045,7 +1045,7 @@ case $basic_machine in |
||||
diff -r de35eae9048a config.sub |
||||
--- a/config.sub Wed Apr 24 23:33:20 2013 +0200 |
||||
+++ b/config.sub Thu Apr 25 08:51:00 2013 +0200 |
||||
@@ -1008,7 +1008,7 @@ |
||||
;; |
||||
ppc64) basic_machine=powerpc64-unknown |
||||
;; |
||||
- ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` |
||||
+ ppc64-* | ppc64p7-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` |
||||
;; |
||||
ppc64le | powerpc64little) |
||||
ppc64le | powerpc64little | ppc64-le | powerpc64-little) |
||||
basic_machine=powerpc64le-unknown |
||||
|
@ -0,0 +1,70 @@
@@ -0,0 +1,70 @@
|
||||
diff --git a/Lib/threading.py b/Lib/threading.py |
||||
index cb49c4a..c9795a5 100644 |
||||
--- a/Lib/threading.py |
||||
+++ b/Lib/threading.py |
||||
@@ -305,7 +305,7 @@ class _Condition(_Verbose): |
||||
else: |
||||
return True |
||||
|
||||
- def wait(self, timeout=None): |
||||
+ def wait(self, timeout=None, balancing=True): |
||||
"""Wait until notified or until a timeout occurs. |
||||
|
||||
If the calling thread has not acquired the lock when this method is |
||||
@@ -354,7 +354,10 @@ class _Condition(_Verbose): |
||||
remaining = endtime - _time() |
||||
if remaining <= 0: |
||||
break |
||||
- delay = min(delay * 2, remaining, .05) |
||||
+ if balancing: |
||||
+ delay = min(delay * 2, remaining, 0.05) |
||||
+ else: |
||||
+ delay = remaining |
||||
_sleep(delay) |
||||
if not gotit: |
||||
if __debug__: |
||||
@@ -599,7 +602,7 @@ class _Event(_Verbose): |
||||
finally: |
||||
self.__cond.release() |
||||
|
||||
- def wait(self, timeout=None): |
||||
+ def wait(self, timeout=None, balancing=True): |
||||
"""Block until the internal flag is true. |
||||
|
||||
If the internal flag is true on entry, return immediately. Otherwise, |
||||
@@ -617,7 +620,7 @@ class _Event(_Verbose): |
||||
self.__cond.acquire() |
||||
try: |
||||
if not self.__flag: |
||||
- self.__cond.wait(timeout) |
||||
+ self.__cond.wait(timeout, balancing) |
||||
return self.__flag |
||||
finally: |
||||
self.__cond.release() |
||||
@@ -908,7 +911,7 @@ class Thread(_Verbose): |
||||
if 'dummy_threading' not in _sys.modules: |
||||
raise |
||||
|
||||
- def join(self, timeout=None): |
||||
+ def join(self, timeout=None, balancing=True): |
||||
"""Wait until the thread terminates. |
||||
|
||||
This blocks the calling thread until the thread whose join() method is |
||||
@@ -957,7 +960,7 @@ class Thread(_Verbose): |
||||
if __debug__: |
||||
self._note("%s.join(): timed out", self) |
||||
break |
||||
- self.__block.wait(delay) |
||||
+ self.__block.wait(delay, balancing) |
||||
else: |
||||
if __debug__: |
||||
self._note("%s.join(): thread stopped", self) |
||||
@@ -1143,7 +1146,7 @@ class _DummyThread(Thread): |
||||
def _set_daemon(self): |
||||
return True |
||||
|
||||
- def join(self, timeout=None): |
||||
+ def join(self, timeout=None, balancing=True): |
||||
assert False, "cannot join a dummy thread" |
||||
|
||||
|
@ -0,0 +1,13 @@
@@ -0,0 +1,13 @@
|
||||
diff -up Python-2.7.5/setup.py.orig Python-2.7.5/setup.py |
||||
--- Python-2.7.5/setup.py.orig 2013-07-17 15:20:12.086820082 +0200 |
||||
+++ Python-2.7.5/setup.py 2013-07-17 15:21:28.490023903 +0200 |
||||
@@ -2050,7 +2050,8 @@ class PyBuildExt(build_ext): |
||||
if not line: |
||||
ffi_inc = None |
||||
break |
||||
- if line.startswith('#define LIBFFI_H'): |
||||
+ if line.startswith('#define LIBFFI_H') or \ |
||||
+ line.startswith('#define ffi_wrapper_h'): |
||||
break |
||||
ffi_lib = None |
||||
if ffi_inc is not None: |
@ -0,0 +1,12 @@
@@ -0,0 +1,12 @@
|
||||
diff -up Python-2.7.5/Lib/urllib2.py.orig Python-2.7.5/Lib/urllib2.py |
||||
--- Python-2.7.5/Lib/urllib2.py.orig 2013-07-17 12:22:58.595525622 +0200 |
||||
+++ Python-2.7.5/Lib/urllib2.py 2013-07-17 12:19:59.875898030 +0200 |
||||
@@ -728,6 +728,8 @@ class ProxyHandler(BaseHandler): |
||||
if proxy_type is None: |
||||
proxy_type = orig_type |
||||
|
||||
+ req.get_host() |
||||
+ |
||||
if req.host and proxy_bypass(req.host): |
||||
return None |
||||
|
@ -0,0 +1,57 @@
@@ -0,0 +1,57 @@
|
||||
--- Python-2.7.5/Python/marshal.c 2013-05-12 05:32:53.000000000 +0200 |
||||
+++ /home/rkuska/hg/cpython/Python/marshal.c 2013-07-18 10:33:26.392486235 +0200 |
||||
@@ -88,7 +88,7 @@ |
||||
} |
||||
|
||||
static void |
||||
-w_string(char *s, Py_ssize_t n, WFILE *p) |
||||
+w_string(const char *s, Py_ssize_t n, WFILE *p) |
||||
{ |
||||
if (p->fp != NULL) { |
||||
fwrite(s, 1, n, p->fp); |
||||
@@ -141,6 +141,13 @@ |
||||
# define W_SIZE w_long |
||||
#endif |
||||
|
||||
+static void |
||||
+w_pstring(const char *s, Py_ssize_t n, WFILE *p) |
||||
+{ |
||||
+ W_SIZE(n, p); |
||||
+ w_string(s, n, p); |
||||
+} |
||||
+ |
||||
/* We assume that Python longs are stored internally in base some power of |
||||
2**15; for the sake of portability we'll always read and write them in base |
||||
exactly 2**15. */ |
||||
@@ -338,9 +345,7 @@ |
||||
else { |
||||
w_byte(TYPE_STRING, p); |
||||
} |
||||
- n = PyString_GET_SIZE(v); |
||||
- W_SIZE(n, p); |
||||
- w_string(PyString_AS_STRING(v), n, p); |
||||
+ w_pstring(PyBytes_AS_STRING(v), PyString_GET_SIZE(v), p); |
||||
} |
||||
#ifdef Py_USING_UNICODE |
||||
else if (PyUnicode_CheckExact(v)) { |
||||
@@ -352,9 +357,7 @@ |
||||
return; |
||||
} |
||||
w_byte(TYPE_UNICODE, p); |
||||
- n = PyString_GET_SIZE(utf8); |
||||
- W_SIZE(n, p); |
||||
- w_string(PyString_AS_STRING(utf8), n, p); |
||||
+ w_pstring(PyString_AS_STRING(utf8), PyString_GET_SIZE(utf8), p); |
||||
Py_DECREF(utf8); |
||||
} |
||||
#endif |
||||
@@ -441,8 +444,7 @@ |
||||
PyBufferProcs *pb = v->ob_type->tp_as_buffer; |
||||
w_byte(TYPE_STRING, p); |
||||
n = (*pb->bf_getreadbuffer)(v, 0, (void **)&s); |
||||
- W_SIZE(n, p); |
||||
- w_string(s, n, p); |
||||
+ w_pstring(s, n, p); |
||||
} |
||||
else { |
||||
w_byte(TYPE_UNKNOWN, p); |
@ -0,0 +1,25 @@
@@ -0,0 +1,25 @@
|
||||
diff -r e8b8279ca118 setup.py |
||||
--- a/setup.py Sun Jul 21 21:57:52 2013 -0400 |
||||
+++ b/setup.py Tue Aug 20 09:45:31 2013 +0200 |
||||
@@ -1480,12 +1480,21 @@ |
||||
'expat/xmltok_impl.h' |
||||
] |
||||
|
||||
+ # Add an explicit RPATH to pyexpat.so pointing at the directory |
||||
+ # containing the system expat (which has the extra XML_SetHashSalt |
||||
+ # symbol), to avoid an ImportError with a link error if there's an |
||||
+ # LD_LIBRARY_PATH containing a "vanilla" build of expat (without the |
||||
+ # symbol) (rhbz#833271): |
||||
+ EXPAT_RPATH = '/usr/lib64' if sys.maxint == 0x7fffffffffffffff else '/usr/lib' |
||||
+ |
||||
+ |
||||
exts.append(Extension('pyexpat', |
||||
define_macros = define_macros, |
||||
include_dirs = expat_inc, |
||||
libraries = expat_lib, |
||||
sources = ['pyexpat.c'] + expat_sources, |
||||
depends = expat_depends, |
||||
+ extra_link_args = ['-Wl,-rpath,%s' % EXPAT_RPATH] |
||||
)) |
||||
|
||||
# Fredrik Lundh's cElementTree module. Note that this also |
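The comment explains why an explicit RPATH is injected at link time. A free-standing sketch of the same linker flag for an arbitrary distutils extension follows; the module name 'demo' and its demo.c source are hypothetical, and constructing the Extension object does not build anything:

import sys
from distutils.core import Extension

# 64-bit interpreter -> /usr/lib64, matching the sys.maxint test in the patch.
libdir = '/usr/lib64' if sys.maxsize > 2**32 else '/usr/lib'

ext = Extension('demo',
                sources=['demo.c'],
                extra_link_args=['-Wl,-rpath,%s' % libdir])

print(ext.extra_link_args)   # e.g. ['-Wl,-rpath,/usr/lib64']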
@ -0,0 +1,247 @@
@@ -0,0 +1,247 @@
|
||||
diff -r 9ddc63c039ba Lib/test/nullbytecert.pem |
||||
--- /dev/null Thu Jan 01 00:00:00 1970 +0000 |
||||
+++ b/Lib/test/nullbytecert.pem Sun Aug 11 18:13:17 2013 +0200 |
||||
@@ -0,0 +1,90 @@ |
||||
+Certificate: |
||||
+ Data: |
||||
+ Version: 3 (0x2) |
||||
+ Serial Number: 0 (0x0) |
||||
+ Signature Algorithm: sha1WithRSAEncryption |
||||
+ Issuer: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev@python.org |
||||
+ Validity |
||||
+ Not Before: Aug 7 13:11:52 2013 GMT |
||||
+ Not After : Aug 7 13:12:52 2013 GMT |
||||
+ Subject: C=US, ST=Oregon, L=Beaverton, O=Python Software Foundation, OU=Python Core Development, CN=null.python.org\x00example.org/emailAddress=python-dev@python.org |
||||
+ Subject Public Key Info: |
||||
+ Public Key Algorithm: rsaEncryption |
||||
+ Public-Key: (2048 bit) |
||||
+ Modulus: |
||||
+ 00:b5:ea:ed:c9:fb:46:7d:6f:3b:76:80:dd:3a:f3: |
||||
+ 03:94:0b:a7:a6:db:ec:1d:df:ff:23:74:08:9d:97: |
||||
+ 16:3f:a3:a4:7b:3e:1b:0e:96:59:25:03:a7:26:e2: |
||||
+ 88:a9:cf:79:cd:f7:04:56:b0:ab:79:32:6e:59:c1: |
||||
+ 32:30:54:eb:58:a8:cb:91:f0:42:a5:64:27:cb:d4: |
||||
+ 56:31:88:52:ad:cf:bd:7f:f0:06:64:1f:cc:27:b8: |
||||
+ a3:8b:8c:f3:d8:29:1f:25:0b:f5:46:06:1b:ca:02: |
||||
+ 45:ad:7b:76:0a:9c:bf:bb:b9:ae:0d:16:ab:60:75: |
||||
+ ae:06:3e:9c:7c:31:dc:92:2f:29:1a:e0:4b:0c:91: |
||||
+ 90:6c:e9:37:c5:90:d7:2a:d7:97:15:a3:80:8f:5d: |
||||
+ 7b:49:8f:54:30:d4:97:2c:1c:5b:37:b5:ab:69:30: |
||||
+ 68:43:d3:33:78:4b:02:60:f5:3c:44:80:a1:8f:e7: |
||||
+ f0:0f:d1:5e:87:9e:46:cf:62:fc:f9:bf:0c:65:12: |
||||
+ f1:93:c8:35:79:3f:c8:ec:ec:47:f5:ef:be:44:d5: |
||||
+ ae:82:1e:2d:9a:9f:98:5a:67:65:e1:74:70:7c:cb: |
||||
+ d3:c2:ce:0e:45:49:27:dc:e3:2d:d4:fb:48:0e:2f: |
||||
+ 9e:77:b8:14:46:c0:c4:36:ca:02:ae:6a:91:8c:da: |
||||
+ 2f:85 |
||||
+ Exponent: 65537 (0x10001) |
||||
+ X509v3 extensions: |
||||
+ X509v3 Basic Constraints: critical |
||||
+ CA:FALSE |
||||
+ X509v3 Subject Key Identifier: |
||||
+ 88:5A:55:C0:52:FF:61:CD:52:A3:35:0F:EA:5A:9C:24:38:22:F7:5C |
||||
+ X509v3 Key Usage: |
||||
+ Digital Signature, Non Repudiation, Key Encipherment |
||||
+ X509v3 Subject Alternative Name: |
||||
+ ************************************************************* |
||||
+ WARNING: The values for DNS, email and URI are WRONG. OpenSSL |
||||
+ doesn't print the text after a NULL byte. |
||||
+ ************************************************************* |
||||
+ DNS:altnull.python.org, email:null@python.org, URI:http://null.python.org, IP Address:192.0.2.1, IP Address:2001:DB8:0:0:0:0:0:1 |
||||
+ Signature Algorithm: sha1WithRSAEncryption |
||||
+ ac:4f:45:ef:7d:49:a8:21:70:8e:88:59:3e:d4:36:42:70:f5: |
||||
+ a3:bd:8b:d7:a8:d0:58:f6:31:4a:b1:a4:a6:dd:6f:d9:e8:44: |
||||
+ 3c:b6:0a:71:d6:7f:b1:08:61:9d:60:ce:75:cf:77:0c:d2:37: |
||||
+ 86:02:8d:5e:5d:f9:0f:71:b4:16:a8:c1:3d:23:1c:f1:11:b3: |
||||
+ 56:6e:ca:d0:8d:34:94:e6:87:2a:99:f2:ae:ae:cc:c2:e8:86: |
||||
+ de:08:a8:7f:c5:05:fa:6f:81:a7:82:e6:d0:53:9d:34:f4:ac: |
||||
+ 3e:40:fe:89:57:7a:29:a4:91:7e:0b:c6:51:31:e5:10:2f:a4: |
||||
+ 60:76:cd:95:51:1a:be:8b:a1:b0:fd:ad:52:bd:d7:1b:87:60: |
||||
+ d2:31:c7:17:c4:18:4f:2d:08:25:a3:a7:4f:b7:92:ca:e2:f5: |
||||
+ 25:f1:54:75:81:9d:b3:3d:61:a2:f7:da:ed:e1:c6:6f:2c:60: |
||||
+ 1f:d8:6f:c5:92:05:ab:c9:09:62:49:a9:14:ad:55:11:cc:d6: |
||||
+ 4a:19:94:99:97:37:1d:81:5f:8b:cf:a3:a8:96:44:51:08:3d: |
||||
+ 0b:05:65:12:eb:b6:70:80:88:48:72:4f:c6:c2:da:cf:cd:8e: |
||||
+ 5b:ba:97:2f:60:b4:96:56:49:5e:3a:43:76:63:04:be:2a:f6: |
||||
+ c1:ca:a9:94 |
||||
+-----BEGIN CERTIFICATE----- |
||||
+MIIE2DCCA8CgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBxTELMAkGA1UEBhMCVVMx |
||||
+DzANBgNVBAgMBk9yZWdvbjESMBAGA1UEBwwJQmVhdmVydG9uMSMwIQYDVQQKDBpQ |
||||
+eXRob24gU29mdHdhcmUgRm91bmRhdGlvbjEgMB4GA1UECwwXUHl0aG9uIENvcmUg |
||||
+RGV2ZWxvcG1lbnQxJDAiBgNVBAMMG251bGwucHl0aG9uLm9yZwBleGFtcGxlLm9y |
||||
+ZzEkMCIGCSqGSIb3DQEJARYVcHl0aG9uLWRldkBweXRob24ub3JnMB4XDTEzMDgw |
||||
+NzEzMTE1MloXDTEzMDgwNzEzMTI1MlowgcUxCzAJBgNVBAYTAlVTMQ8wDQYDVQQI |
||||
+DAZPcmVnb24xEjAQBgNVBAcMCUJlYXZlcnRvbjEjMCEGA1UECgwaUHl0aG9uIFNv |
||||
+ZnR3YXJlIEZvdW5kYXRpb24xIDAeBgNVBAsMF1B5dGhvbiBDb3JlIERldmVsb3Bt |
||||
+ZW50MSQwIgYDVQQDDBtudWxsLnB5dGhvbi5vcmcAZXhhbXBsZS5vcmcxJDAiBgkq |
||||
+hkiG9w0BCQEWFXB5dGhvbi1kZXZAcHl0aG9uLm9yZzCCASIwDQYJKoZIhvcNAQEB |
||||
+BQADggEPADCCAQoCggEBALXq7cn7Rn1vO3aA3TrzA5QLp6bb7B3f/yN0CJ2XFj+j |
||||
+pHs+Gw6WWSUDpybiiKnPec33BFawq3kyblnBMjBU61ioy5HwQqVkJ8vUVjGIUq3P |
||||
+vX/wBmQfzCe4o4uM89gpHyUL9UYGG8oCRa17dgqcv7u5rg0Wq2B1rgY+nHwx3JIv |
||||
+KRrgSwyRkGzpN8WQ1yrXlxWjgI9de0mPVDDUlywcWze1q2kwaEPTM3hLAmD1PESA |
||||
+oY/n8A/RXoeeRs9i/Pm/DGUS8ZPINXk/yOzsR/XvvkTVroIeLZqfmFpnZeF0cHzL |
||||
+08LODkVJJ9zjLdT7SA4vnne4FEbAxDbKAq5qkYzaL4UCAwEAAaOB0DCBzTAMBgNV |
||||
+HRMBAf8EAjAAMB0GA1UdDgQWBBSIWlXAUv9hzVKjNQ/qWpwkOCL3XDALBgNVHQ8E |
||||
+BAMCBeAwgZAGA1UdEQSBiDCBhYIeYWx0bnVsbC5weXRob24ub3JnAGV4YW1wbGUu |
||||
+Y29tgSBudWxsQHB5dGhvbi5vcmcAdXNlckBleGFtcGxlLm9yZ4YpaHR0cDovL251 |
||||
+bGwucHl0aG9uLm9yZwBodHRwOi8vZXhhbXBsZS5vcmeHBMAAAgGHECABDbgAAAAA |
||||
+AAAAAAAAAAEwDQYJKoZIhvcNAQEFBQADggEBAKxPRe99SaghcI6IWT7UNkJw9aO9 |
||||
+i9eo0Fj2MUqxpKbdb9noRDy2CnHWf7EIYZ1gznXPdwzSN4YCjV5d+Q9xtBaowT0j |
||||
+HPERs1ZuytCNNJTmhyqZ8q6uzMLoht4IqH/FBfpvgaeC5tBTnTT0rD5A/olXeimk |
||||
+kX4LxlEx5RAvpGB2zZVRGr6LobD9rVK91xuHYNIxxxfEGE8tCCWjp0+3ksri9SXx |
||||
+VHWBnbM9YaL32u3hxm8sYB/Yb8WSBavJCWJJqRStVRHM1koZlJmXNx2BX4vPo6iW |
||||
+RFEIPQsFZRLrtnCAiEhyT8bC2s/Njlu6ly9gtJZWSV46Q3ZjBL4q9sHKqZQ= |
||||
+-----END CERTIFICATE----- |
||||
diff -r 9ddc63c039ba Lib/test/test_ssl.py |
||||
--- a/Lib/test/test_ssl.py Sun Aug 11 13:04:50 2013 +0300 |
||||
+++ b/Lib/test/test_ssl.py Sun Aug 11 18:13:17 2013 +0200 |
||||
@@ -25,6 +25,7 @@ |
||||
HOST = test_support.HOST |
||||
CERTFILE = None |
||||
SVN_PYTHON_ORG_ROOT_CERT = None |
||||
+NULLBYTECERT = None |
||||
|
||||
def handle_error(prefix): |
||||
exc_format = ' '.join(traceback.format_exception(*sys.exc_info())) |
||||
@@ -123,6 +124,27 @@ |
||||
('DNS', 'projects.forum.nokia.com')) |
||||
) |
||||
|
||||
+ def test_parse_cert_CVE_2013_4073(self): |
||||
+ p = ssl._ssl._test_decode_cert(NULLBYTECERT) |
||||
+ if test_support.verbose: |
||||
+ sys.stdout.write("\n" + pprint.pformat(p) + "\n") |
||||
+ subject = ((('countryName', 'US'),), |
||||
+ (('stateOrProvinceName', 'Oregon'),), |
||||
+ (('localityName', 'Beaverton'),), |
||||
+ (('organizationName', 'Python Software Foundation'),), |
||||
+ (('organizationalUnitName', 'Python Core Development'),), |
||||
+ (('commonName', 'null.python.org\x00example.org'),), |
||||
+ (('emailAddress', 'python-dev@python.org'),)) |
||||
+ self.assertEqual(p['subject'], subject) |
||||
+ self.assertEqual(p['issuer'], subject) |
||||
+ self.assertEqual(p['subjectAltName'], |
||||
+ (('DNS', 'altnull.python.org\x00example.com'), |
||||
+ ('email', 'null@python.org\x00user@example.org'), |
||||
+ ('URI', 'http://null.python.org\x00http://example.org'), |
||||
+ ('IP Address', '192.0.2.1'), |
||||
+ ('IP Address', '2001:DB8:0:0:0:0:0:1\n')) |
||||
+ ) |
||||
+ |
||||
def test_DER_to_PEM(self): |
||||
with open(SVN_PYTHON_ORG_ROOT_CERT, 'r') as f: |
||||
pem = f.read() |
||||
@@ -1360,7 +1382,7 @@ |
||||
|
||||
|
||||
def test_main(verbose=False): |
||||
- global CERTFILE, SVN_PYTHON_ORG_ROOT_CERT, NOKIACERT |
||||
+ global CERTFILE, SVN_PYTHON_ORG_ROOT_CERT, NOKIACERT, NULLBYTECERT |
||||
CERTFILE = os.path.join(os.path.dirname(__file__) or os.curdir, |
||||
"keycert.pem") |
||||
SVN_PYTHON_ORG_ROOT_CERT = os.path.join( |
||||
@@ -1368,10 +1390,13 @@ |
||||
"https_svn_python_org_root.pem") |
||||
NOKIACERT = os.path.join(os.path.dirname(__file__) or os.curdir, |
||||
"nokia.pem") |
||||
+ NULLBYTECERT = os.path.join(os.path.dirname(__file__) or os.curdir, |
||||
+ "nullbytecert.pem") |
||||
|
||||
if (not os.path.exists(CERTFILE) or |
||||
not os.path.exists(SVN_PYTHON_ORG_ROOT_CERT) or |
||||
- not os.path.exists(NOKIACERT)): |
||||
+ not os.path.exists(NOKIACERT) or |
||||
+ not os.path.exists(NULLBYTECERT)): |
||||
raise test_support.TestFailed("Can't read certificate files!") |
||||
|
||||
tests = [BasicTests, BasicSocketTests] |
||||
diff -r 9ddc63c039ba Modules/_ssl.c |
||||
--- a/Modules/_ssl.c Sun Aug 11 13:04:50 2013 +0300 |
||||
+++ b/Modules/_ssl.c Sun Aug 11 18:13:17 2013 +0200 |
||||
@@ -741,8 +741,13 @@ |
||||
|
||||
/* get a rendering of each name in the set of names */ |
||||
|
||||
+ int gntype; |
||||
+ ASN1_STRING *as = NULL; |
||||
+ |
||||
name = sk_GENERAL_NAME_value(names, j); |
||||
- if (name->type == GEN_DIRNAME) { |
||||
+ gntype = name-> type; |
||||
+ switch (gntype) { |
||||
+ case GEN_DIRNAME: |
||||
|
||||
/* we special-case DirName as a tuple of tuples of attributes */ |
||||
|
||||
@@ -764,11 +769,61 @@ |
||||
goto fail; |
||||
} |
||||
PyTuple_SET_ITEM(t, 1, v); |
||||
+ break; |
||||
|
||||
- } else { |
||||
+ case GEN_EMAIL: |
||||
+ case GEN_DNS: |
||||
+ case GEN_URI: |
||||
+ /* GENERAL_NAME_print() doesn't handle NUL bytes in ASN1_string |
||||
+ correctly. */ |
||||
+ t = PyTuple_New(2); |
||||
+ if (t == NULL) |
||||
+ goto fail; |
||||
+ switch (gntype) { |
||||
+ case GEN_EMAIL: |
||||
+ v = PyUnicode_FromString("email"); |
||||
+ as = name->d.rfc822Name; |
||||
+ break; |
||||
+ case GEN_DNS: |
||||
+ v = PyUnicode_FromString("DNS"); |
||||
+ as = name->d.dNSName; |
||||
+ break; |
||||
+ case GEN_URI: |
||||
+ v = PyUnicode_FromString("URI"); |
||||
+ as = name->d.uniformResourceIdentifier; |
||||
+ break; |
||||
+ } |
||||
+ if (v == NULL) { |
||||
+ Py_DECREF(t); |
||||
+ goto fail; |
||||
+ } |
||||
+ PyTuple_SET_ITEM(t, 0, v); |
||||
+ v = PyString_FromStringAndSize((char *)ASN1_STRING_data(as), |
||||
+ ASN1_STRING_length(as)); |
||||
+ if (v == NULL) { |
||||
+ Py_DECREF(t); |
||||
+ goto fail; |
||||
+ } |
||||
+ PyTuple_SET_ITEM(t, 1, v); |
||||
+ break; |
||||
|
||||
+ default: |
||||
/* for everything else, we use the OpenSSL print form */ |
||||
- |
||||
+ switch (gntype) { |
||||
+ /* check for new general name type */ |
||||
+ case GEN_OTHERNAME: |
||||
+ case GEN_X400: |
||||
+ case GEN_EDIPARTY: |
||||
+ case GEN_IPADD: |
||||
+ case GEN_RID: |
||||
+ break; |
||||
+ default: |
||||
+ if (PyErr_Warn(PyExc_RuntimeWarning, |
||||
+ "Unknown general name type") == -1) { |
||||
+ goto fail; |
||||
+ } |
||||
+ break; |
||||
+ } |
||||
(void) BIO_reset(biobuf); |
||||
GENERAL_NAME_print(biobuf, name); |
||||
len = BIO_gets(biobuf, buf, sizeof(buf)-1); |
||||
@@ -794,6 +849,7 @@ |
||||
goto fail; |
||||
} |
||||
PyTuple_SET_ITEM(t, 1, v); |
||||
+ break; |
||||
} |
||||
|
||||
/* and add that rendering to the list */ |
@ -0,0 +1,11 @@
@@ -0,0 +1,11 @@
|
||||
--- Python-2.7.5-orig/Tools/gdb/libpython.py 2013-05-12 03:32:54.000000000 +0000 |
||||
+++ Python-2.7.5-orig/Tools/gdb/libpython.py 2013-09-15 09:56:25.494000000 +0000 |
||||
@@ -887,6 +887,8 @@ |
||||
newline character''' |
||||
if self.is_optimized_out(): |
||||
return '(frame information optimized out)' |
||||
+ if self.filename() == '<string>': |
||||
+ return '(in an eval block)' |
||||
with open(self.filename(), 'r') as f: |
||||
all_lines = f.readlines() |
||||
# Convert from 1-based current_line_num to 0-based list offset: |
@ -0,0 +1,207 @@
@@ -0,0 +1,207 @@
|
||||
--- Tools/gdb/libpython.py.orig 2013-10-09 10:54:59.894701668 +0200 |
||||
+++ Tools/gdb/libpython.py 2013-10-09 11:09:30.278703290 +0200 |
||||
@@ -1194,39 +1194,113 @@ |
||||
iter_frame = iter_frame.newer() |
||||
return index |
||||
|
||||
+ # We divide frames into: |
||||
+ # - "python frames": |
||||
+ # - "bytecode frames" i.e. PyEval_EvalFrameEx |
||||
+ # - "other python frames": things that are of interest from a python |
||||
+ # POV, but aren't bytecode (e.g. GC, GIL) |
||||
+ # - everything else |
||||
+ |
||||
+ def is_python_frame(self): |
||||
+ '''Is this a PyEval_EvalFrameEx frame, or some other important |
||||
+ frame? (see is_other_python_frame for what "important" means in this |
||||
+ context)''' |
||||
+ if self.is_evalframeex(): |
||||
+ return True |
||||
+ if self.is_other_python_frame(): |
||||
+ return True |
||||
+ return False |
||||
+ |
||||
def is_evalframeex(self): |
||||
- '''Is this a PyEval_EvalFrameEx frame?''' |
||||
- if self._gdbframe.name() == 'PyEval_EvalFrameEx': |
||||
- ''' |
||||
- I believe we also need to filter on the inline |
||||
- struct frame_id.inline_depth, only regarding frames with |
||||
- an inline depth of 0 as actually being this function |
||||
- |
||||
- So we reject those with type gdb.INLINE_FRAME |
||||
- ''' |
||||
- if self._gdbframe.type() == gdb.NORMAL_FRAME: |
||||
- # We have a PyEval_EvalFrameEx frame: |
||||
- return True |
||||
+ if self._gdbframe.function(): |
||||
+ if self._gdbframe.function().name == 'PyEval_EvalFrameEx': |
||||
+ ''' |
||||
+ I believe we also need to filter on the inline |
||||
+ struct frame_id.inline_depth, only regarding frames with |
||||
+ an inline depth of 0 as actually being this function |
||||
+ |
||||
+ So we reject those with type gdb.INLINE_FRAME |
||||
+ ''' |
||||
+ if self._gdbframe.type() == gdb.NORMAL_FRAME: |
||||
+ # We have a PyEval_EvalFrameEx frame: |
||||
+ return True |
||||
+ |
||||
+ return False |
||||
+ |
||||
+ def is_other_python_frame(self): |
||||
+ '''Is this frame worth displaying in python backtraces? |
||||
+ Examples: |
||||
+ - waiting on the GIL |
||||
+ - garbage-collecting |
||||
+ - within a CFunction |
||||
+ If it is, return a descriptive string |
||||
+ For other frames, return False |
||||
+ ''' |
||||
+ if self.is_waiting_for_gil(): |
||||
+ return 'Waiting for a lock (e.g. GIL)' |
||||
+ elif self.is_gc_collect(): |
||||
+ return 'Garbage-collecting' |
||||
+ else: |
||||
+ # Detect invocations of PyCFunction instances: |
||||
+ if self._gdbframe.name() == 'PyCFunction_Call': |
||||
+ try: |
||||
+ func = self._gdbframe.read_var('func') |
||||
+ # Use the prettyprinter for the func: |
||||
+ return str(func) |
||||
+ except RuntimeError: |
||||
+ return 'PyCFunction invocation (unable to read "func")' |
||||
+ older = self.older() |
||||
+ if older and older._gdbframe.name() == 'call_function': |
||||
+ # Within that frame: |
||||
+ # 'call_function' contains, amongst other things, a |
||||
+ # hand-inlined copy of PyCFunction_Call. |
||||
+ # "func" is the local containing the PyObject* of the |
||||
+ # callable instance |
||||
+ # Report it, but only if it's a PyCFunction (since otherwise |
||||
+ # we'd be reporting an implementation detail of every other |
||||
+ # function invocation) |
||||
+ try: |
||||
+ func = older._gdbframe.read_var('func') |
||||
+ funcobj = PyObjectPtr.from_pyobject_ptr(func) |
||||
+ if isinstance(funcobj, PyCFunctionObjectPtr): |
||||
+ # Use the prettyprinter for the func: |
||||
+ return str(func) |
||||
+ except RuntimeError: |
||||
+ return False |
||||
|
||||
+ # This frame isn't worth reporting: |
||||
return False |
||||
|
||||
+ def is_waiting_for_gil(self): |
||||
+ '''Is this frame waiting for a lock?''' |
||||
+ framename = self._gdbframe.name() |
||||
+ if framename: |
||||
+ return 'pthread_cond_timedwait' in framename or \ |
||||
+ 'PyThread_acquire_lock' in framename |
||||
+ |
||||
+ def is_gc_collect(self): |
||||
+ '''Is this frame "collect" within the garbage-collector?''' |
||||
+ return self._gdbframe.name() == 'collect' |
||||
+ |
||||
def get_pyop(self): |
||||
try: |
||||
f = self._gdbframe.read_var('f') |
||||
- frame = PyFrameObjectPtr.from_pyobject_ptr(f) |
||||
- if not frame.is_optimized_out(): |
||||
- return frame |
||||
- # gdb is unable to get the "f" argument of PyEval_EvalFrameEx() |
||||
- # because it was "optimized out". Try to get "f" from the frame |
||||
- # of the caller, PyEval_EvalCodeEx(). |
||||
- orig_frame = frame |
||||
- caller = self._gdbframe.older() |
||||
- if caller: |
||||
- f = caller.read_var('f') |
||||
- frame = PyFrameObjectPtr.from_pyobject_ptr(f) |
||||
- if not frame.is_optimized_out(): |
||||
- return frame |
||||
- return orig_frame |
||||
+ obj = PyFrameObjectPtr.from_pyobject_ptr(f) |
||||
+ if isinstance(obj, PyFrameObjectPtr): |
||||
+ return obj |
||||
+ else: |
||||
+ return None |
||||
+ except ValueError: |
||||
+ return None |
||||
+ |
||||
+ def get_py_co(self): |
||||
+ try: |
||||
+ co = self._gdbframe.read_var('co') |
||||
+ obj = PyCodeObjectPtr.from_pyobject_ptr(co) |
||||
+ if isinstance(obj, PyCodeObjectPtr): |
||||
+ return obj |
||||
+ else: |
||||
+ return None |
||||
except ValueError: |
||||
return None |
||||
|
||||
@@ -1239,8 +1313,22 @@ |
||||
|
||||
@classmethod |
||||
def get_selected_python_frame(cls): |
||||
- '''Try to obtain the Frame for the python code in the selected frame, |
||||
- or None''' |
||||
+ '''Try to obtain the Frame for the python-related code in the selected |
||||
+ frame, or None''' |
||||
+ frame = cls.get_selected_frame() |
||||
+ |
||||
+ while frame: |
||||
+ if frame.is_python_frame(): |
||||
+ return frame |
||||
+ frame = frame.older() |
||||
+ |
||||
+ # Not found: |
||||
+ return None |
||||
+ |
||||
+ @classmethod |
||||
+ def get_selected_bytecode_frame(cls): |
||||
+ '''Try to obtain the Frame for the python bytecode interpreter in the |
||||
+ selected GDB frame, or None''' |
||||
frame = cls.get_selected_frame() |
||||
|
||||
while frame: |
||||
@@ -1265,7 +1353,11 @@ |
||||
else: |
||||
sys.stdout.write('#%i (unable to read python frame information)\n' % self.get_index()) |
||||
else: |
||||
- sys.stdout.write('#%i\n' % self.get_index()) |
||||
+ info = self.is_other_python_frame() |
||||
+ if info: |
||||
+ sys.stdout.write('#%i %s\n' % (self.get_index(), info)) |
||||
+ else: |
||||
+ sys.stdout.write('#%i\n' % self.get_index()) |
||||
|
||||
class PyList(gdb.Command): |
||||
'''List the current Python source code, if any |
||||
@@ -1301,7 +1393,7 @@ |
||||
if m: |
||||
start, end = map(int, m.groups()) |
||||
|
||||
- frame = Frame.get_selected_python_frame() |
||||
+ frame = Frame.get_selected_bytecode_frame() |
||||
if not frame: |
||||
print 'Unable to locate python frame' |
||||
return |
||||
@@ -1353,7 +1445,7 @@ |
||||
if not iter_frame: |
||||
break |
||||
|
||||
- if iter_frame.is_evalframeex(): |
||||
+ if iter_frame.is_python_frame(): |
||||
# Result: |
||||
if iter_frame.select(): |
||||
iter_frame.print_summary() |
||||
@@ -1407,7 +1499,7 @@ |
||||
def invoke(self, args, from_tty): |
||||
frame = Frame.get_selected_python_frame() |
||||
while frame: |
||||
- if frame.is_evalframeex(): |
||||
+ if frame.is_python_frame(): |
||||
frame.print_summary() |
||||
frame = frame.older() |
||||
|
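The comments in this patch describe three buckets: bytecode frames (PyEval_EvalFrameEx), other frames of Python interest (GIL waits, garbage collection, C function calls), and everything else. A gdb-free sketch of that classification, using plain strings where the real code reads gdb.Frame.name():

def classify_frame(name):
    # Mirrors is_evalframeex(), is_waiting_for_gil() and is_gc_collect().
    if name == 'PyEval_EvalFrameEx':
        return 'bytecode frame'
    if name and ('pthread_cond_timedwait' in name or
                 'PyThread_acquire_lock' in name):
        return 'waiting for a lock (e.g. the GIL)'
    if name == 'collect':
        return 'garbage-collecting'
    return 'not a frame of Python interest'

for n in ('PyEval_EvalFrameEx', 'PyThread_acquire_lock', 'collect', 'memcpy'):
    print('%-25s -> %s' % (n, classify_frame(n)))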
@ -0,0 +1,21 @@
@@ -0,0 +1,21 @@
|
||||
diff -up Python-2.7.5/setup.py.orig Python-2.7.5/setup.py |
||||
--- Python-2.7.5/setup.py.orig 2013-11-07 01:36:18.853604232 +0100 |
||||
+++ Python-2.7.5/setup.py 2013-11-07 01:39:22.163305821 +0100 |
||||
@@ -1483,6 +1483,9 @@ class PyBuildExt(build_ext): |
||||
# Fredrik Lundh's cElementTree module. Note that this also |
||||
# uses expat (via the CAPI hook in pyexpat). |
||||
|
||||
+ # Add an explicit RPATH to _elementtree.so (rhbz#1019345) |
||||
+ EXPAT_RPATH = '/usr/lib64' if sys.maxint == 0x7fffffffffffffff else '/usr/lib' |
||||
+ |
||||
if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')): |
||||
define_macros.append(('USE_PYEXPAT_CAPI', None)) |
||||
exts.append(Extension('_elementtree', |
||||
@@ -1492,6 +1495,7 @@ class PyBuildExt(build_ext): |
||||
sources = ['_elementtree.c'], |
||||
depends = ['pyexpat.c'] + expat_sources + |
||||
expat_depends, |
||||
+ extra_link_args = ['-Wl,-rpath,%s' % EXPAT_RPATH] |
||||
)) |
||||
else: |
||||
missing.append('_elementtree') |
File diff suppressed because one or more lines are too long
@ -0,0 +1,43 @@
@@ -0,0 +1,43 @@
|
||||
|
||||
# HG changeset patch |
||||
# User Benjamin Peterson <benjamin@python.org> |
||||
# Date 1389671978 18000 |
||||
# Node ID 87673659d8f7ba1623cd4914f09ad3d2ade034e9 |
||||
# Parent 2631d33ee7fbd5f0288931ef37872218d511d2e8 |
||||
complain when nbytes > buflen to fix possible buffer overflow (closes #20246) |
||||
|
||||
diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py |
||||
--- a/Lib/test/test_socket.py |
||||
+++ b/Lib/test/test_socket.py |
||||
@@ -1620,6 +1620,16 @@ class BufferIOTest(SocketConnectedTest): |
||||
|
||||
_testRecvFromIntoMemoryview = _testRecvFromIntoArray |
||||
|
||||
+ def testRecvFromIntoSmallBuffer(self): |
||||
+ # See issue #20246. |
||||
+ buf = bytearray(8) |
||||
+ self.assertRaises(ValueError, self.cli_conn.recvfrom_into, buf, 1024) |
||||
+ |
||||
+ def _testRecvFromIntoSmallBuffer(self): |
||||
+ with test_support.check_py3k_warnings(): |
||||
+ buf = buffer(MSG*2048) |
||||
+ self.serv_conn.send(buf) |
||||
+ |
||||
|
||||
TIPC_STYPE = 2000 |
||||
TIPC_LOWER = 200 |
||||
diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c |
||||
--- a/Modules/socketmodule.c |
||||
+++ b/Modules/socketmodule.c |
||||
@@ -2742,6 +2742,10 @@ sock_recvfrom_into(PySocketSockObject *s |
||||
if (recvlen == 0) { |
||||
/* If nbytes was not specified, use the buffer's length */ |
||||
recvlen = buflen; |
||||
+ } else if (recvlen > buflen) { |
||||
+ PyErr_SetString(PyExc_ValueError, |
||||
+ "nbytes is greater than the length of the buffer"); |
||||
+ goto error; |
||||
} |
||||
|
||||
readlen = sock_recvfrom_guts(s, buf.buf, recvlen, flags, &addr); |
||||
|
@ -0,0 +1,19 @@
@@ -0,0 +1,19 @@
|
||||
--- Lib/test/test_gdb.py.orig 2014-08-01 14:30:43.397473152 +0200 |
||||
+++ Lib/test/test_gdb.py 2014-08-01 14:34:50.907325691 +0200 |
||||
@@ -135,6 +135,16 @@ |
||||
# Disable this: |
||||
'set print entry-values no', |
||||
|
||||
+ # The tests assume that the first frame of printed |
||||
+ # backtrace will not contain program counter, |
||||
+ # that is however not guaranteed by gdb (rhbz#1125657) |
||||
+ # therefore we need to use 'set print address off' to |
||||
+ # make sure the counter is not there. For example: |
||||
+ # #0 in PyObject_Print ... |
||||
+ # is assumed, but sometimes this can be e.g. |
||||
+ # #0 0x00003fffb7dd1798 in PyObject_Print ... |
||||
+ 'set print address off', |
||||
+ |
||||
'run'] |
||||
|
||||
if HAS_AUTOLOAD_SAFEPATH: |
@ -0,0 +1,216 @@
@@ -0,0 +1,216 @@
|
||||
|
||||
# HG changeset patch |
||||
# User Richard Oudkerk <shibturn@gmail.com> |
||||
# Date 1372700728 -3600 |
||||
# Node ID bc34fe4a0d58a047509798acb0b4b2a21ce1e375 |
||||
# Parent 26ef5d5d5c3ea76ab411f2984d507aadce0ce8d7 |
||||
Issue #17097: Make multiprocessing ignore EINTR. |
||||
|
||||
diff --git a/Lib/multiprocessing/connection.py b/Lib/multiprocessing/connection.py |
||||
--- a/Lib/multiprocessing/connection.py |
||||
+++ b/Lib/multiprocessing/connection.py |
||||
@@ -270,7 +270,14 @@ class SocketListener(object): |
||||
self._unlink = None |
||||
|
||||
def accept(self): |
||||
- s, self._last_accepted = self._socket.accept() |
||||
+ while True: |
||||
+ try: |
||||
+ s, self._last_accepted = self._socket.accept() |
||||
+ except socket.error as e: |
||||
+ if e.args[0] != errno.EINTR: |
||||
+ raise |
||||
+ else: |
||||
+ break |
||||
s.setblocking(True) |
||||
fd = duplicate(s.fileno()) |
||||
conn = _multiprocessing.Connection(fd) |
||||
diff --git a/Lib/test/test_multiprocessing.py b/Lib/test/test_multiprocessing.py |
||||
--- a/Lib/test/test_multiprocessing.py |
||||
+++ b/Lib/test/test_multiprocessing.py |
||||
@@ -2461,12 +2461,80 @@ class TestForkAwareThreadLock(unittest.T |
||||
self.assertLessEqual(new_size, old_size) |
||||
|
||||
# |
||||
+# Issue #17097: EINTR should be ignored by recv(), send(), accept() etc |
||||
+# |
||||
+ |
||||
+class TestIgnoreEINTR(unittest.TestCase): |
||||
+ |
||||
+ @classmethod |
||||
+ def _test_ignore(cls, conn): |
||||
+ def handler(signum, frame): |
||||
+ pass |
||||
+ signal.signal(signal.SIGUSR1, handler) |
||||
+ conn.send('ready') |
||||
+ x = conn.recv() |
||||
+ conn.send(x) |
||||
+ conn.send_bytes(b'x'*(1024*1024)) # sending 1 MB should block |
||||
+ |
||||
+ @unittest.skipUnless(hasattr(signal, 'SIGUSR1'), 'requires SIGUSR1') |
||||
+ def test_ignore(self): |
||||
+ conn, child_conn = multiprocessing.Pipe() |
||||
+ try: |
||||
+ p = multiprocessing.Process(target=self._test_ignore, |
||||
+ args=(child_conn,)) |
||||
+ p.daemon = True |
||||
+ p.start() |
||||
+ child_conn.close() |
||||
+ self.assertEqual(conn.recv(), 'ready') |
||||
+ time.sleep(0.1) |
||||
+ os.kill(p.pid, signal.SIGUSR1) |
||||
+ time.sleep(0.1) |
||||
+ conn.send(1234) |
||||
+ self.assertEqual(conn.recv(), 1234) |
||||
+ time.sleep(0.1) |
||||
+ os.kill(p.pid, signal.SIGUSR1) |
||||
+ self.assertEqual(conn.recv_bytes(), b'x'*(1024*1024)) |
||||
+ time.sleep(0.1) |
||||
+ p.join() |
||||
+ finally: |
||||
+ conn.close() |
||||
+ |
||||
+ @classmethod |
||||
+ def _test_ignore_listener(cls, conn): |
||||
+ def handler(signum, frame): |
||||
+ pass |
||||
+ signal.signal(signal.SIGUSR1, handler) |
||||
+ l = multiprocessing.connection.Listener() |
||||
+ conn.send(l.address) |
||||
+ a = l.accept() |
||||
+ a.send('welcome') |
||||
+ |
||||
+ @unittest.skipUnless(hasattr(signal, 'SIGUSR1'), 'requires SIGUSR1') |
||||
+ def test_ignore_listener(self): |
||||
+ conn, child_conn = multiprocessing.Pipe() |
||||
+ try: |
||||
+ p = multiprocessing.Process(target=self._test_ignore_listener, |
||||
+ args=(child_conn,)) |
||||
+ p.daemon = True |
||||
+ p.start() |
||||
+ child_conn.close() |
||||
+ address = conn.recv() |
||||
+ time.sleep(0.1) |
||||
+ os.kill(p.pid, signal.SIGUSR1) |
||||
+ time.sleep(0.1) |
||||
+ client = multiprocessing.connection.Client(address) |
||||
+ self.assertEqual(client.recv(), 'welcome') |
||||
+ p.join() |
||||
+ finally: |
||||
+ conn.close() |
||||
+ |
||||
+# |
||||
# |
||||
# |
||||
|
||||
testcases_other = [OtherTest, TestInvalidHandle, TestInitializers, |
||||
TestStdinBadfiledescriptor, TestTimeouts, TestNoForkBomb, |
||||
- TestFlags, TestForkAwareThreadLock] |
||||
+ TestFlags, TestForkAwareThreadLock, TestIgnoreEINTR] |
||||
|
||||
# |
||||
# |
||||
diff --git a/Modules/_multiprocessing/socket_connection.c b/Modules/_multiprocessing/socket_connection.c |
||||
--- a/Modules/_multiprocessing/socket_connection.c |
||||
+++ b/Modules/_multiprocessing/socket_connection.c |
||||
@@ -23,6 +23,21 @@ |
||||
#endif |
||||
|
||||
/* |
||||
+ * Wrapper for PyErr_CheckSignals() which can be called without the GIL |
||||
+ */ |
||||
+ |
||||
+static int |
||||
+check_signals(void) |
||||
+{ |
||||
+ PyGILState_STATE state; |
||||
+ int res; |
||||
+ state = PyGILState_Ensure(); |
||||
+ res = PyErr_CheckSignals(); |
||||
+ PyGILState_Release(state); |
||||
+ return res; |
||||
+} |
||||
+ |
||||
+/* |
||||
* Send string to file descriptor |
||||
*/ |
||||
|
||||
@@ -34,8 +49,14 @@ static Py_ssize_t |
||||
|
||||
while (length > 0) { |
||||
res = WRITE(h, p, length); |
||||
- if (res < 0) |
||||
+ if (res < 0) { |
||||
+ if (errno == EINTR) { |
||||
+ if (check_signals() < 0) |
||||
+ return MP_EXCEPTION_HAS_BEEN_SET; |
||||
+ continue; |
||||
+ } |
||||
return MP_SOCKET_ERROR; |
||||
+ } |
||||
length -= res; |
||||
p += res; |
||||
} |
||||
@@ -56,12 +77,16 @@ static Py_ssize_t |
||||
|
||||
while (remaining > 0) { |
||||
temp = READ(h, p, remaining); |
||||
- if (temp <= 0) { |
||||
- if (temp == 0) |
||||
- return remaining == length ? |
||||
- MP_END_OF_FILE : MP_EARLY_END_OF_FILE; |
||||
- else |
||||
- return temp; |
||||
+ if (temp < 0) { |
||||
+ if (errno == EINTR) { |
||||
+ if (check_signals() < 0) |
||||
+ return MP_EXCEPTION_HAS_BEEN_SET; |
||||
+ continue; |
||||
+ } |
||||
+ return temp; |
||||
+ } |
||||
+ else if (temp == 0) { |
||||
+ return remaining == length ? MP_END_OF_FILE : MP_EARLY_END_OF_FILE; |
||||
} |
||||
remaining -= temp; |
||||
p += temp; |
||||
@@ -171,9 +196,16 @@ conn_poll(ConnectionObject *conn, double |
||||
p.revents = 0; |
||||
|
||||
if (timeout < 0) { |
||||
- res = poll(&p, 1, -1); |
||||
+ do { |
||||
+ res = poll(&p, 1, -1); |
||||
+ } while (res < 0 && errno == EINTR); |
||||
} else { |
||||
res = poll(&p, 1, (int)(timeout * 1000 + 0.5)); |
||||
+ if (res < 0 && errno == EINTR) { |
||||
+ /* We were interrupted by a signal. Just indicate a |
||||
+ timeout even though we are early. */ |
||||
+ return FALSE; |
||||
+ } |
||||
} |
||||
|
||||
if (res < 0) { |
||||
@@ -209,12 +241,19 @@ conn_poll(ConnectionObject *conn, double |
||||
FD_SET((SOCKET)conn->handle, &rfds); |
||||
|
||||
if (timeout < 0.0) { |
||||
- res = select((int)conn->handle+1, &rfds, NULL, NULL, NULL); |
||||
+ do { |
||||
+ res = select((int)conn->handle+1, &rfds, NULL, NULL, NULL); |
||||
+ } while (res < 0 && errno == EINTR); |
||||
} else { |
||||
struct timeval tv; |
||||
tv.tv_sec = (long)timeout; |
||||
tv.tv_usec = (long)((timeout - tv.tv_sec) * 1e6 + 0.5); |
||||
res = select((int)conn->handle+1, &rfds, NULL, NULL, &tv); |
||||
+ if (res < 0 && errno == EINTR) { |
||||
+ /* We were interrupted by a signal. Just indicate a |
||||
+ timeout even though we are early. */ |
||||
+ return FALSE; |
||||
+ } |
||||
} |
||||
|
||||
if (res < 0) { |
||||
|
@ -0,0 +1,288 @@
@@ -0,0 +1,288 @@
|
||||
|
||||
# HG changeset patch |
||||
# User Antoine Pitrou <solipsis@pitrou.net> |
||||
# Date 1377898693 -7200 |
||||
# Node ID 43749cb6bdbd0fdab70f76cd171c3c02a3f600dd |
||||
# Parent ba54011aa295004ad87438211fe3bb1568dd69ab |
||||
Issue #18851: Avoid a double close of subprocess pipes when the child process fails starting. |
||||
|
||||
diff --git a/Lib/subprocess.py b/Lib/subprocess.py |
||||
--- a/Lib/subprocess.py |
||||
+++ b/Lib/subprocess.py |
||||
@@ -698,12 +698,12 @@ class Popen(object): |
||||
|
||||
(p2cread, p2cwrite, |
||||
c2pread, c2pwrite, |
||||
- errread, errwrite) = self._get_handles(stdin, stdout, stderr) |
||||
+ errread, errwrite), to_close = self._get_handles(stdin, stdout, stderr) |
||||
|
||||
try: |
||||
self._execute_child(args, executable, preexec_fn, close_fds, |
||||
cwd, env, universal_newlines, |
||||
- startupinfo, creationflags, shell, |
||||
+ startupinfo, creationflags, shell, to_close, |
||||
p2cread, p2cwrite, |
||||
c2pread, c2pwrite, |
||||
errread, errwrite) |
||||
@@ -711,18 +711,12 @@ class Popen(object): |
||||
# Preserve original exception in case os.close raises. |
||||
exc_type, exc_value, exc_trace = sys.exc_info() |
||||
|
||||
- to_close = [] |
||||
- # Only close the pipes we created. |
||||
- if stdin == PIPE: |
||||
- to_close.extend((p2cread, p2cwrite)) |
||||
- if stdout == PIPE: |
||||
- to_close.extend((c2pread, c2pwrite)) |
||||
- if stderr == PIPE: |
||||
- to_close.extend((errread, errwrite)) |
||||
- |
||||
for fd in to_close: |
||||
try: |
||||
- os.close(fd) |
||||
+ if mswindows: |
||||
+ fd.Close() |
||||
+ else: |
||||
+ os.close(fd) |
||||
except EnvironmentError: |
||||
pass |
||||
|
||||
@@ -816,8 +810,9 @@ class Popen(object): |
||||
"""Construct and return tuple with IO objects: |
||||
p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite |
||||
""" |
||||
+ to_close = set() |
||||
if stdin is None and stdout is None and stderr is None: |
||||
- return (None, None, None, None, None, None) |
||||
+ return (None, None, None, None, None, None), to_close |
||||
|
||||
p2cread, p2cwrite = None, None |
||||
c2pread, c2pwrite = None, None |
||||
@@ -835,6 +830,10 @@ class Popen(object): |
||||
# Assuming file-like object |
||||
p2cread = msvcrt.get_osfhandle(stdin.fileno()) |
||||
p2cread = self._make_inheritable(p2cread) |
||||
+ # We just duplicated the handle, it has to be closed at the end |
||||
+ to_close.add(p2cread) |
||||
+ if stdin == PIPE: |
||||
+ to_close.add(p2cwrite) |
||||
|
||||
if stdout is None: |
||||
c2pwrite = _subprocess.GetStdHandle(_subprocess.STD_OUTPUT_HANDLE) |
||||
@@ -848,6 +847,10 @@ class Popen(object): |
||||
# Assuming file-like object |
||||
c2pwrite = msvcrt.get_osfhandle(stdout.fileno()) |
||||
c2pwrite = self._make_inheritable(c2pwrite) |
||||
+ # We just duplicated the handle, it has to be closed at the end |
||||
+ to_close.add(c2pwrite) |
||||
+ if stdout == PIPE: |
||||
+ to_close.add(c2pread) |
||||
|
||||
if stderr is None: |
||||
errwrite = _subprocess.GetStdHandle(_subprocess.STD_ERROR_HANDLE) |
||||
@@ -863,10 +866,14 @@ class Popen(object): |
||||
# Assuming file-like object |
||||
errwrite = msvcrt.get_osfhandle(stderr.fileno()) |
||||
errwrite = self._make_inheritable(errwrite) |
||||
+ # We just duplicated the handle, it has to be closed at the end |
||||
+ to_close.add(errwrite) |
||||
+ if stderr == PIPE: |
||||
+ to_close.add(errread) |
||||
|
||||
return (p2cread, p2cwrite, |
||||
c2pread, c2pwrite, |
||||
- errread, errwrite) |
||||
+ errread, errwrite), to_close |
||||
|
||||
|
||||
def _make_inheritable(self, handle): |
||||
@@ -895,7 +902,7 @@ class Popen(object): |
||||
|
||||
def _execute_child(self, args, executable, preexec_fn, close_fds, |
||||
cwd, env, universal_newlines, |
||||
- startupinfo, creationflags, shell, |
||||
+ startupinfo, creationflags, shell, to_close, |
||||
p2cread, p2cwrite, |
||||
c2pread, c2pwrite, |
||||
errread, errwrite): |
||||
@@ -934,6 +941,10 @@ class Popen(object): |
||||
# kill children. |
||||
creationflags |= _subprocess.CREATE_NEW_CONSOLE |
||||
|
||||
+ def _close_in_parent(fd): |
||||
+ fd.Close() |
||||
+ to_close.remove(fd) |
||||
+ |
||||
# Start the process |
||||
try: |
||||
hp, ht, pid, tid = _subprocess.CreateProcess(executable, args, |
||||
@@ -958,11 +969,11 @@ class Popen(object): |
||||
# pipe will not close when the child process exits and the |
||||
# ReadFile will hang. |
||||
if p2cread is not None: |
||||
- p2cread.Close() |
||||
+ _close_in_parent(p2cread) |
||||
if c2pwrite is not None: |
||||
- c2pwrite.Close() |
||||
+ _close_in_parent(c2pwrite) |
||||
if errwrite is not None: |
||||
- errwrite.Close() |
||||
+ _close_in_parent(errwrite) |
||||
|
||||
# Retain the process handle, but close the thread handle |
||||
self._child_created = True |
||||
@@ -1088,6 +1099,7 @@ class Popen(object): |
||||
"""Construct and return tuple with IO objects: |
||||
p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite |
||||
""" |
||||
+ to_close = set() |
||||
p2cread, p2cwrite = None, None |
||||
c2pread, c2pwrite = None, None |
||||
errread, errwrite = None, None |
||||
@@ -1096,6 +1108,7 @@ class Popen(object): |
||||
pass |
||||
elif stdin == PIPE: |
||||
p2cread, p2cwrite = self.pipe_cloexec() |
||||
+ to_close.update((p2cread, p2cwrite)) |
||||
elif isinstance(stdin, int): |
||||
p2cread = stdin |
||||
else: |
||||
@@ -1106,6 +1119,7 @@ class Popen(object): |
||||
pass |
||||
elif stdout == PIPE: |
||||
c2pread, c2pwrite = self.pipe_cloexec() |
||||
+ to_close.update((c2pread, c2pwrite)) |
||||
elif isinstance(stdout, int): |
||||
c2pwrite = stdout |
||||
else: |
||||
@@ -1116,6 +1130,7 @@ class Popen(object): |
||||
pass |
||||
elif stderr == PIPE: |
||||
errread, errwrite = self.pipe_cloexec() |
||||
+ to_close.update((errread, errwrite)) |
||||
elif stderr == STDOUT: |
||||
errwrite = c2pwrite |
||||
elif isinstance(stderr, int): |
||||
@@ -1126,7 +1141,7 @@ class Popen(object): |
||||
|
||||
return (p2cread, p2cwrite, |
||||
c2pread, c2pwrite, |
||||
- errread, errwrite) |
||||
+ errread, errwrite), to_close |
||||
|
||||
|
||||
def _set_cloexec_flag(self, fd, cloexec=True): |
||||
@@ -1170,7 +1185,7 @@ class Popen(object): |
||||
|
||||
def _execute_child(self, args, executable, preexec_fn, close_fds, |
||||
cwd, env, universal_newlines, |
||||
- startupinfo, creationflags, shell, |
||||
+ startupinfo, creationflags, shell, to_close, |
||||
p2cread, p2cwrite, |
||||
c2pread, c2pwrite, |
||||
errread, errwrite): |
||||
@@ -1189,6 +1204,10 @@ class Popen(object): |
||||
if executable is None: |
||||
executable = args[0] |
||||
|
||||
+ def _close_in_parent(fd): |
||||
+ os.close(fd) |
||||
+ to_close.remove(fd) |
||||
+ |
||||
# For transferring possible exec failure from child to parent |
||||
# The first char specifies the exception type: 0 means |
||||
# OSError, 1 means some other error. |
||||
@@ -1283,17 +1302,17 @@ class Popen(object): |
||||
# be sure the FD is closed no matter what |
||||
os.close(errpipe_write) |
||||
|
||||
- if p2cread is not None and p2cwrite is not None: |
||||
- os.close(p2cread) |
||||
- if c2pwrite is not None and c2pread is not None: |
||||
- os.close(c2pwrite) |
||||
- if errwrite is not None and errread is not None: |
||||
- os.close(errwrite) |
||||
- |
||||
# Wait for exec to fail or succeed; possibly raising exception |
||||
# Exception limited to 1M |
||||
data = _eintr_retry_call(os.read, errpipe_read, 1048576) |
||||
finally: |
||||
+ if p2cread is not None and p2cwrite is not None: |
||||
+ _close_in_parent(p2cread) |
||||
+ if c2pwrite is not None and c2pread is not None: |
||||
+ _close_in_parent(c2pwrite) |
||||
+ if errwrite is not None and errread is not None: |
||||
+ _close_in_parent(errwrite) |
||||
+ |
||||
# be sure the FD is closed no matter what |
||||
os.close(errpipe_read) |
||||
|
||||
diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py |
||||
--- a/Lib/test/test_subprocess.py |
||||
+++ b/Lib/test/test_subprocess.py |
||||
@@ -14,6 +14,10 @@ try: |
||||
import resource |
||||
except ImportError: |
||||
resource = None |
||||
+try: |
||||
+ import threading |
||||
+except ImportError: |
||||
+ threading = None |
||||
|
||||
mswindows = (sys.platform == "win32") |
||||
|
||||
@@ -629,6 +633,36 @@ class ProcessTestCase(BaseTestCase): |
||||
if c.exception.errno not in (errno.ENOENT, errno.EACCES): |
||||
raise c.exception |
||||
|
||||
+ @unittest.skipIf(threading is None, "threading required") |
||||
+ def test_double_close_on_error(self): |
||||
+ # Issue #18851 |
||||
+ fds = [] |
||||
+ def open_fds(): |
||||
+ for i in range(20): |
||||
+ fds.extend(os.pipe()) |
||||
+ time.sleep(0.001) |
||||
+ t = threading.Thread(target=open_fds) |
||||
+ t.start() |
||||
+ try: |
||||
+ with self.assertRaises(EnvironmentError): |
||||
+ subprocess.Popen(['nonexisting_i_hope'], |
||||
+ stdin=subprocess.PIPE, |
||||
+ stdout=subprocess.PIPE, |
||||
+ stderr=subprocess.PIPE) |
||||
+ finally: |
||||
+ t.join() |
||||
+ exc = None |
||||
+ for fd in fds: |
||||
+ # If a double close occurred, some of those fds will |
||||
+ # already have been closed by mistake, and os.close() |
||||
+ # here will raise. |
||||
+ try: |
||||
+ os.close(fd) |
||||
+ except OSError as e: |
||||
+ exc = e |
||||
+ if exc is not None: |
||||
+ raise exc |
||||
+ |
||||
def test_handles_closed_on_exception(self): |
||||
# If CreateProcess exits with an error, ensure the |
||||
# duplicate output handles are released |
||||
@@ -783,7 +817,7 @@ class POSIXProcessTestCase(BaseTestCase) |
||||
|
||||
def _execute_child( |
||||
self, args, executable, preexec_fn, close_fds, cwd, env, |
||||
- universal_newlines, startupinfo, creationflags, shell, |
||||
+ universal_newlines, startupinfo, creationflags, shell, to_close, |
||||
p2cread, p2cwrite, |
||||
c2pread, c2pwrite, |
||||
errread, errwrite): |
||||
@@ -791,7 +825,7 @@ class POSIXProcessTestCase(BaseTestCase) |
||||
subprocess.Popen._execute_child( |
||||
self, args, executable, preexec_fn, close_fds, |
||||
cwd, env, universal_newlines, |
||||
- startupinfo, creationflags, shell, |
||||
+ startupinfo, creationflags, shell, to_close, |
||||
p2cread, p2cwrite, |
||||
c2pread, c2pwrite, |
||||
errread, errwrite) |
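The core idea of the patch is bookkeeping: every pipe end Popen creates is recorded in a to_close set, and anything closed on the happy path is removed from it, so the error path closes only descriptors that are still open. A toy sketch of that invariant (no subprocess involved):

import os

def demo():
    to_close = set()
    r, w = os.pipe()              # the only descriptors we created
    to_close.update((r, w))

    def close_in_parent(fd):
        os.close(fd)
        to_close.discard(fd)      # closed fds leave the set immediately

    try:
        raise OSError('pretend CreateProcess/exec failed')
    except OSError:
        # Error path: close whatever is still open -- each fd exactly once.
        for fd in list(to_close):
            close_in_parent(fd)
    assert not to_close

demo()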
@ -0,0 +1,20 @@
@@ -0,0 +1,20 @@
|
||||
|
||||
# HG changeset patch |
||||
# User Éric Araujo <merwok@netwok.org> |
||||
# Date 1394614885 14400 |
||||
# Node ID 677327810121891704491bafa6209af5b60ebc91 |
||||
# Parent 0f1237b61f58a77a159ab6e452782a8924ff2966 |
||||
Fix missing import in bdist_rpm (#18045) |
||||
|
||||
diff --git a/Lib/distutils/command/bdist_rpm.py b/Lib/distutils/command/bdist_rpm.py |
||||
--- a/Lib/distutils/command/bdist_rpm.py |
||||
+++ b/Lib/distutils/command/bdist_rpm.py |
||||
@@ -12,6 +12,7 @@ import string |
||||
from distutils.core import Command |
||||
from distutils.debug import DEBUG |
||||
from distutils.file_util import write_file |
||||
+from distutils.sysconfig import get_python_version |
||||
from distutils.errors import (DistutilsOptionError, DistutilsPlatformError, |
||||
DistutilsFileError, DistutilsExecError) |
||||
from distutils import log |
||||
|
@ -0,0 +1,166 @@
@@ -0,0 +1,166 @@
|
||||
|
||||
# HG changeset patch |
||||
# User Victor Stinner <victor.stinner@gmail.com> |
||||
# Date 1406197344 -7200 |
||||
# Node ID 0177d8a4e82a613de0c64e747656c1d0b63e49b3 |
||||
# Parent e70ab72286b470b7209b91d3aa8a21953aafb78f |
||||
Issue #19884: readline: Disable the meta modifier key if stdout is not a |
||||
terminal to not write the ANSI sequence "\033[1034h" into stdout. This sequence |
||||
is used on some terminal (ex: TERM=xterm-256color") to enable support of 8 bit |
||||
characters. |
||||
|
||||
diff --git a/Lib/test/test_readline.py b/Lib/test/test_readline.py |
||||
--- a/Lib/test/test_readline.py |
||||
+++ b/Lib/test/test_readline.py |
||||
@@ -1,17 +1,19 @@ |
||||
""" |
||||
Very minimal unittests for parts of the readline module. |
||||
- |
||||
-These tests were added to check that the libedit emulation on OSX and |
||||
-the "real" readline have the same interface for history manipulation. That's |
||||
-why the tests cover only a small subset of the interface. |
||||
""" |
||||
+import os |
||||
import unittest |
||||
from test.test_support import run_unittest, import_module |
||||
+from test.script_helper import assert_python_ok |
||||
|
||||
# Skip tests if there is no readline module |
||||
readline = import_module('readline') |
||||
|
||||
class TestHistoryManipulation (unittest.TestCase): |
||||
+ """These tests were added to check that the libedit emulation on OSX and |
||||
+ the "real" readline have the same interface for history manipulation. |
||||
+ That's why the tests cover only a small subset of the interface. |
||||
+ """ |
||||
|
||||
@unittest.skipIf(not hasattr(readline, 'clear_history'), |
||||
"The history update test cannot be run because the " |
||||
@@ -40,8 +42,18 @@ class TestHistoryManipulation (unittest. |
||||
self.assertEqual(readline.get_current_history_length(), 1) |
||||
|
||||
|
||||
+class TestReadline(unittest.TestCase): |
||||
+ def test_init(self): |
||||
+ # Issue #19884: Ensure that the ANSI sequence "\033[1034h" is not |
||||
+ # written into stdout when the readline module is imported and stdout |
||||
+ # is redirected to a pipe. |
||||
+ rc, stdout, stderr = assert_python_ok('-c', 'import readline', |
||||
+ TERM='xterm-256color') |
||||
+ self.assertEqual(stdout, b'') |
||||
+ |
||||
+ |
||||
def test_main(): |
||||
- run_unittest(TestHistoryManipulation) |
||||
+ run_unittest(TestHistoryManipulation, TestReadline) |
||||
|
||||
if __name__ == "__main__": |
||||
test_main() |
||||
diff --git a/Modules/readline.c b/Modules/readline.c |
||||
--- a/Modules/readline.c |
||||
+++ b/Modules/readline.c |
||||
@@ -887,7 +887,7 @@ setup_readline(void) |
||||
#endif |
||||
|
||||
#ifdef __APPLE__ |
||||
- /* the libedit readline emulation resets key bindings etc |
||||
+ /* the libedit readline emulation resets key bindings etc |
||||
* when calling rl_initialize. So call it upfront |
||||
*/ |
||||
if (using_libedit_emulation) |
||||
@@ -932,6 +932,17 @@ setup_readline(void) |
||||
|
||||
begidx = PyInt_FromLong(0L); |
||||
endidx = PyInt_FromLong(0L); |
||||
+ |
||||
+ if (!isatty(STDOUT_FILENO)) { |
||||
+ /* Issue #19884: stdout is not a terminal. Disable meta modifier |
||||
+ keys to not write the ANSI sequence "\033[1034h" into stdout. On |
||||
+ terminals supporting 8 bit characters like TERM=xterm-256color |
||||
+ (which is now the default Fedora since Fedora 18), the meta key is |
||||
+ used to enable support of 8 bit characters (ANSI sequence |
||||
+ "\033[1034h"). */ |
||||
+ rl_variable_bind ("enable-meta-key", "off"); |
||||
+ } |
||||
+ |
||||
/* Initialize (allows .inputrc to override) |
||||
* |
||||
* XXX: A bug in the readline-2.2 library causes a memory leak |
||||
@@ -943,7 +954,7 @@ setup_readline(void) |
||||
else |
||||
#endif /* __APPLE__ */ |
||||
rl_initialize(); |
||||
- |
||||
+ |
||||
RESTORE_LOCALE(saved_locale) |
||||
} |
||||
|
||||
|
||||
|
||||
# HG changeset patch |
||||
# User Victor Stinner <victor.stinner@gmail.com> |
||||
# Date 1406232681 -7200 |
||||
# Node ID f0ab6f9f06036dfacff09f22f86464840b50eb0a |
||||
# Parent d422062d7d366386acdb81851b0f2ec3a6f6750c |
||||
Issue #19884, readline: calling rl_variable_bind ("enable-meta-key", "off") |
||||
does crash on Mac OS X which uses libedit instead of readline. |
||||
|
||||
diff --git a/Modules/readline.c b/Modules/readline.c |
||||
--- a/Modules/readline.c |
||||
+++ b/Modules/readline.c |
||||
@@ -933,15 +933,19 @@ setup_readline(void) |
||||
begidx = PyInt_FromLong(0L); |
||||
endidx = PyInt_FromLong(0L); |
||||
|
||||
+#ifndef __APPLE__ |
||||
if (!isatty(STDOUT_FILENO)) { |
||||
/* Issue #19884: stdout is not a terminal. Disable meta modifier |
||||
keys to not write the ANSI sequence "\033[1034h" into stdout. On |
||||
terminals supporting 8 bit characters like TERM=xterm-256color |
||||
(which is now the default Fedora since Fedora 18), the meta key is |
||||
used to enable support of 8 bit characters (ANSI sequence |
||||
- "\033[1034h"). */ |
||||
+ "\033[1034h"). |
||||
+ |
||||
+ With libedit, this call makes readline() crash. */ |
||||
rl_variable_bind ("enable-meta-key", "off"); |
||||
} |
||||
+#endif |
||||
|
||||
/* Initialize (allows .inputrc to override) |
||||
* |
||||
|
||||
|
||||
# HG changeset patch |
||||
# User Antoine Pitrou <solipsis@pitrou.net> |
||||
# Date 1415109130 -3600 |
||||
# Node ID eba6e68e818c694e499dfc4b22dde095d2557ab1 |
||||
# Parent e54d0b197c8245bd29ea09f421e2f1da47370f41 |
||||
Issue #22773: fix failing test with old readline versions due to issue #19884. |
||||
|
||||
diff --git a/Lib/test/test_readline.py b/Lib/test/test_readline.py |
||||
--- a/Lib/test/test_readline.py |
||||
+++ b/Lib/test/test_readline.py |
||||
@@ -43,6 +43,10 @@ class TestHistoryManipulation (unittest. |
||||
|
||||
|
||||
class TestReadline(unittest.TestCase): |
||||
+ |
||||
+ @unittest.skipIf(readline._READLINE_VERSION < 0x0600 |
||||
+ and "libedit" not in readline.__doc__, |
||||
+ "not supported in this library version") |
||||
def test_init(self): |
||||
# Issue #19884: Ensure that the ANSI sequence "\033[1034h" is not |
||||
# written into stdout when the readline module is imported and stdout |
||||
diff --git a/Modules/readline.c b/Modules/readline.c |
||||
--- a/Modules/readline.c |
||||
+++ b/Modules/readline.c |
||||
@@ -1184,4 +1184,7 @@ initreadline(void) |
||||
|
||||
PyOS_ReadlineFunctionPointer = call_readline; |
||||
setup_readline(); |
||||
+ |
||||
+ PyModule_AddIntConstant(m, "_READLINE_VERSION", RL_READLINE_VERSION); |
||||
+ PyModule_AddIntConstant(m, "_READLINE_RUNTIME_VERSION", rl_readline_version); |
||||
} |
||||
|
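The three readline patches above keep GNU readline from writing the terminal escape "\033[1034h" to a redirected stdout and guard that workaround against libedit. A minimal Python 2 sketch of the same check the new test performs, runnable outside the test suite (illustrative only; assumes a readline build that honours TERM=xterm-256color):

    import os
    import subprocess
    import sys

    env = dict(os.environ, TERM='xterm-256color')
    # Importing readline with stdout going to a pipe must not emit the
    # meta-key escape sequence on a patched interpreter.
    proc = subprocess.Popen([sys.executable, '-c', 'import readline'],
                            stdout=subprocess.PIPE, env=env)
    out, _ = proc.communicate()
    assert out == '', 'unexpected output: %r' % out
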
@@ -0,0 +1,88 @@
|
||||
|
||||
# HG changeset patch |
||||
# User Benjamin Peterson <benjamin@python.org> |
||||
# Date 1417828515 18000 |
||||
# Node ID d50096708b2d701937e78f525446d729fc28db88 |
||||
# Parent 923aac88a3cc76a95d5a04d9d3ece245147a8064 |
||||
add a default limit for the amount of data xmlrpclib.gzip_decode will return (closes #16043) |
||||
|
||||
diff --git a/Lib/test/test_xmlrpc.py b/Lib/test/test_xmlrpc.py |
||||
--- a/Lib/test/test_xmlrpc.py |
||||
+++ b/Lib/test/test_xmlrpc.py |
||||
@@ -737,7 +737,7 @@ class GzipServerTestCase(BaseServerTestC |
||||
with cm: |
||||
p.pow(6, 8) |
||||
|
||||
- def test_gsip_response(self): |
||||
+ def test_gzip_response(self): |
||||
t = self.Transport() |
||||
p = xmlrpclib.ServerProxy(URL, transport=t) |
||||
old = self.requestHandler.encode_threshold |
||||
@@ -750,6 +750,23 @@ class GzipServerTestCase(BaseServerTestC |
||||
self.requestHandler.encode_threshold = old |
||||
self.assertTrue(a>b) |
||||
|
||||
+ def test_gzip_decode_limit(self): |
||||
+ max_gzip_decode = 20 * 1024 * 1024 |
||||
+ data = '\0' * max_gzip_decode |
||||
+ encoded = xmlrpclib.gzip_encode(data) |
||||
+ decoded = xmlrpclib.gzip_decode(encoded) |
||||
+ self.assertEqual(len(decoded), max_gzip_decode) |
||||
+ |
||||
+ data = '\0' * (max_gzip_decode + 1) |
||||
+ encoded = xmlrpclib.gzip_encode(data) |
||||
+ |
||||
+ with self.assertRaisesRegexp(ValueError, |
||||
+ "max gzipped payload length exceeded"): |
||||
+ xmlrpclib.gzip_decode(encoded) |
||||
+ |
||||
+ xmlrpclib.gzip_decode(encoded, max_decode=-1) |
||||
+ |
||||
+ |
||||
#Test special attributes of the ServerProxy object |
||||
class ServerProxyTestCase(unittest.TestCase): |
||||
def setUp(self): |
||||
diff --git a/Lib/xmlrpclib.py b/Lib/xmlrpclib.py |
||||
--- a/Lib/xmlrpclib.py |
||||
+++ b/Lib/xmlrpclib.py |
||||
@@ -49,6 +49,7 @@ |
||||
# 2003-07-12 gp Correct marshalling of Faults |
||||
# 2003-10-31 mvl Add multicall support |
||||
# 2004-08-20 mvl Bump minimum supported Python version to 2.1 |
||||
+# 2014-12-02 ch/doko Add workaround for gzip bomb vulnerability |
||||
# |
||||
# Copyright (c) 1999-2002 by Secret Labs AB. |
||||
# Copyright (c) 1999-2002 by Fredrik Lundh. |
||||
@@ -1165,10 +1166,13 @@ def gzip_encode(data): |
||||
# in the HTTP header, as described in RFC 1952 |
||||
# |
||||
# @param data The encoded data |
||||
+# @keyparam max_decode Maximum bytes to decode (20MB default), use negative |
||||
+# values for unlimited decoding |
||||
# @return the unencoded data |
||||
# @raises ValueError if data is not correctly coded. |
||||
+# @raises ValueError if max gzipped payload length exceeded |
||||
|
||||
-def gzip_decode(data): |
||||
+def gzip_decode(data, max_decode=20971520): |
||||
"""gzip encoded data -> unencoded data |
||||
|
||||
Decode data using the gzip content encoding as described in RFC 1952 |
||||
@@ -1178,11 +1182,16 @@ def gzip_decode(data): |
||||
f = StringIO.StringIO(data) |
||||
gzf = gzip.GzipFile(mode="rb", fileobj=f) |
||||
try: |
||||
- decoded = gzf.read() |
||||
+ if max_decode < 0: # no limit |
||||
+ decoded = gzf.read() |
||||
+ else: |
||||
+ decoded = gzf.read(max_decode + 1) |
||||
except IOError: |
||||
raise ValueError("invalid data") |
||||
f.close() |
||||
gzf.close() |
||||
+ if max_decode >= 0 and len(decoded) > max_decode: |
||||
+ raise ValueError("max gzipped payload length exceeded") |
||||
return decoded |
||||
|
||||
## |
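With the hunk above applied, xmlrpclib.gzip_decode() stops inflating after 20 MiB by default and only decodes more when the caller opts out explicitly. A short usage sketch (illustrative; the 20971520-byte default is taken from the patch):

    import xmlrpclib

    small = xmlrpclib.gzip_encode('\0' * 1024)
    print len(xmlrpclib.gzip_decode(small))          # 1024, well under the limit

    bomb = xmlrpclib.gzip_encode('\0' * (20971520 + 1))
    try:
        xmlrpclib.gzip_decode(bomb)                  # default limit applies
    except ValueError as exc:
        print exc                                    # max gzipped payload length exceeded
    # Explicitly unlimited decoding is still possible for trusted peers.
    xmlrpclib.gzip_decode(bomb, max_decode=-1)
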
@@ -0,0 +1,52 @@
|
||||
|
||||
# HG changeset patch |
||||
# User Benjamin Peterson <benjamin@python.org> |
||||
# Date 1397441438 14400 |
||||
# Node ID 50c07ed1743da9cd4540d83de0c30bd17aeb41b0 |
||||
# Parent 218e28a935ab4494d05215c243e2129625a71893 |
||||
in scan_once, prevent the reading of arbitrary memory when passed a negative index |
||||
|
||||
Bug reported by Guido Vranken. |
||||
|
||||
diff --git a/Lib/json/tests/test_decode.py b/Lib/json/tests/test_decode.py |
||||
--- a/Lib/json/tests/test_decode.py |
||||
+++ b/Lib/json/tests/test_decode.py |
||||
@@ -60,5 +60,10 @@ class TestDecode(object): |
||||
msg = 'escape' |
||||
self.assertRaisesRegexp(ValueError, msg, self.loads, s) |
||||
|
||||
+ def test_negative_index(self): |
||||
+ d = self.json.JSONDecoder() |
||||
+ self.assertRaises(ValueError, d.raw_decode, 'a'*42, -50000) |
||||
+ self.assertRaises(ValueError, d.raw_decode, u'a'*42, -50000) |
||||
+ |
||||
class TestPyDecode(TestDecode, PyTest): pass |
||||
class TestCDecode(TestDecode, CTest): pass |
||||
diff --git a/Modules/_json.c b/Modules/_json.c |
||||
--- a/Modules/_json.c |
||||
+++ b/Modules/_json.c |
||||
@@ -1468,7 +1468,10 @@ scan_once_str(PyScannerObject *s, PyObje |
||||
PyObject *res; |
||||
char *str = PyString_AS_STRING(pystr); |
||||
Py_ssize_t length = PyString_GET_SIZE(pystr); |
||||
- if (idx >= length) { |
||||
+ if (idx < 0) |
||||
+ /* Compatibility with the Python version. */ |
||||
+ idx += length; |
||||
+ if (idx < 0 || idx >= length) { |
||||
PyErr_SetNone(PyExc_StopIteration); |
||||
return NULL; |
||||
} |
||||
@@ -1555,7 +1558,10 @@ scan_once_unicode(PyScannerObject *s, Py |
||||
PyObject *res; |
||||
Py_UNICODE *str = PyUnicode_AS_UNICODE(pystr); |
||||
Py_ssize_t length = PyUnicode_GET_SIZE(pystr); |
||||
- if (idx >= length) { |
||||
+ if (idx < 0) |
||||
+ /* Compatibility with Python version. */ |
||||
+ idx += length; |
||||
+ if (idx < 0 || idx >= length) { |
||||
PyErr_SetNone(PyExc_StopIteration); |
||||
return NULL; |
||||
} |
||||
|
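The C scanner now matches the pure-Python implementation: a negative start index is taken relative to the end of the string, and anything still out of range raises ValueError instead of letting scan_once read arbitrary memory. Sketch (illustrative):

    import json

    decoder = json.JSONDecoder()
    print decoder.raw_decode('[1, 2]', 0)        # ([1, 2], 6)
    try:
        decoder.raw_decode('a' * 42, -50000)     # far out of range
    except ValueError as exc:
        print 'rejected:', exc
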
@@ -0,0 +1,35 @@
|
||||
|
||||
# HG changeset patch |
||||
# User Benjamin Peterson <benjamin@python.org> |
||||
# Date 1402796189 25200 |
||||
# Node ID b4bab078876811c7d95231d08aa6fa7142fdda66 |
||||
# Parent bb8b0c7fefd0c5ed99b3f336178a4f9554a1d0ef |
||||
url unquote the path before checking if it refers to a CGI script (closes #21766) |
||||
|
||||
diff --git a/Lib/CGIHTTPServer.py b/Lib/CGIHTTPServer.py |
||||
--- a/Lib/CGIHTTPServer.py |
||||
+++ b/Lib/CGIHTTPServer.py |
||||
@@ -84,7 +84,7 @@ class CGIHTTPRequestHandler(SimpleHTTPSe |
||||
path begins with one of the strings in self.cgi_directories |
||||
(and the next character is a '/' or the end of the string). |
||||
""" |
||||
- collapsed_path = _url_collapse_path(self.path) |
||||
+ collapsed_path = _url_collapse_path(urllib.unquote(self.path)) |
||||
dir_sep = collapsed_path.find('/', 1) |
||||
head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:] |
||||
if head in self.cgi_directories: |
||||
diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py |
||||
--- a/Lib/test/test_httpservers.py |
||||
+++ b/Lib/test/test_httpservers.py |
||||
@@ -510,6 +510,11 @@ class CGIHTTPServerTestCase(BaseTestCase |
||||
(res.read(), res.getheader('Content-type'), res.status)) |
||||
self.assertEqual(os.environ['SERVER_SOFTWARE'], signature) |
||||
|
||||
+ def test_urlquote_decoding_in_cgi_check(self): |
||||
+ res = self.request('/cgi-bin%2ffile1.py') |
||||
+ self.assertEqual((b'Hello World\n', 'text/html', 200), |
||||
+ (res.read(), res.getheader('Content-type'), res.status)) |
||||
+ |
||||
|
||||
class SimpleHTTPRequestHandlerTestCase(unittest.TestCase): |
||||
""" Test url parsing """ |
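The one-line change above matters because a request for /cgi-bin%2ffile1.py used to slip past the CGI-directory check and be served as a static file, exposing script source. Decoding first makes the collapsed path match the configured directory again. Sketch of that path handling (illustrative; _url_collapse_path is a private helper of the module):

    import urllib
    from CGIHTTPServer import CGIHTTPRequestHandler, _url_collapse_path

    raw = '/cgi-bin%2ffile1.py'
    collapsed = _url_collapse_path(urllib.unquote(raw))
    print collapsed                                        # /cgi-bin/file1.py
    head = collapsed[:collapsed.find('/', 1)]
    print head in CGIHTTPRequestHandler.cgi_directories    # True -> handled as CGI
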
@@ -0,0 +1,51 @@
|
||||
|
||||
# HG changeset patch |
||||
# User Benjamin Peterson <benjamin@python.org> |
||||
# Date 1403579547 25200 |
||||
# Node ID 8d963c7db507be561e26bbbb852e3a2be3327c3f |
||||
# Parent 8e0b7393e921fb5e05c40265f9272dec90512ef6 |
||||
avoid overflow with large buffer sizes and/or offsets (closes #21831) |
||||
|
||||
diff --git a/Lib/test/test_buffer.py b/Lib/test/test_buffer.py |
||||
--- a/Lib/test/test_buffer.py |
||||
+++ b/Lib/test/test_buffer.py |
||||
@@ -4,6 +4,7 @@ For now, tests just new or changed funct |
||||
|
||||
""" |
||||
|
||||
+import sys |
||||
import unittest |
||||
from test import test_support |
||||
|
||||
@@ -29,6 +30,11 @@ class BufferTests(unittest.TestCase): |
||||
m = memoryview(b) # Should not raise an exception |
||||
self.assertEqual(m.tobytes(), s) |
||||
|
||||
+ def test_large_buffer_size_and_offset(self): |
||||
+ data = bytearray('hola mundo') |
||||
+ buf = buffer(data, sys.maxsize, sys.maxsize) |
||||
+ self.assertEqual(buf[:4096], "") |
||||
+ |
||||
|
||||
def test_main(): |
||||
with test_support.check_py3k_warnings(("buffer.. not supported", |
||||
diff --git a/Objects/bufferobject.c b/Objects/bufferobject.c |
||||
--- a/Objects/bufferobject.c |
||||
+++ b/Objects/bufferobject.c |
||||
@@ -88,7 +88,7 @@ get_buf(PyBufferObject *self, void **ptr |
||||
*size = count; |
||||
else |
||||
*size = self->b_size; |
||||
- if (offset + *size > count) |
||||
+ if (*size > count - offset) |
||||
*size = count - offset; |
||||
} |
||||
return 1; |
||||
@@ -875,4 +875,4 @@ PyTypeObject PyBuffer_Type = { |
||||
0, /* tp_init */ |
||||
0, /* tp_alloc */ |
||||
buffer_new, /* tp_new */ |
||||
-}; |
||||
\ No newline at end of file |
||||
+}; |
||||
|
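The fix rewrites the bounds check so that offset + size can no longer overflow Py_ssize_t; a buffer() whose offset lies past the end of the underlying object now simply behaves as empty. Sketch (Python 2 only, since the buffer type is gone in Python 3; illustrative):

    import sys

    data = bytearray('hola mundo')
    # An offset and size far beyond the object used to overflow and could
    # expose out-of-bounds memory through slicing.
    buf = buffer(data, sys.maxsize, sys.maxsize)
    print repr(buf[:4096])    # '' on a patched interpreter
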
@@ -0,0 +1,108 @@
|
||||
|
||||
# HG changeset patch |
||||
# User Barry Warsaw <barry@python.org> |
||||
# Date 1380582569 14400 |
||||
# Node ID 36680a7c0e22686df9c338a9ca3cdb2c60e05b27 |
||||
# Parent 0f5611bca5a284c0b5f978e83a05818f0907bda8# Parent 731abf7834c43efb321231e65e7dd76ad9e8e661 |
||||
- Issue #16040: CVE-2013-1752: nntplib: Limit maximum line lengths to 2048 to |
||||
prevent readline() calls from consuming too much memory. Patch by Jyrki |
||||
Pulliainen. |
||||
|
||||
diff --git a/Lib/nntplib.py b/Lib/nntplib.py |
||||
--- a/Lib/nntplib.py |
||||
+++ b/Lib/nntplib.py |
||||
@@ -37,6 +37,13 @@ import socket |
||||
"error_reply","error_temp","error_perm","error_proto", |
||||
"error_data",] |
||||
|
||||
+# maximal line length when calling readline(). This is to prevent |
||||
+# reading arbitrary length lines. RFC 3977 limits NNTP line length to |
||||
+# 512 characters, including CRLF. We have selected 2048 just to be on |
||||
+# the safe side. |
||||
+_MAXLINE = 2048 |
||||
+ |
||||
+ |
||||
# Exceptions raised when an error or invalid response is received |
||||
class NNTPError(Exception): |
||||
"""Base class for all nntplib exceptions""" |
||||
@@ -200,7 +207,9 @@ class NNTP: |
||||
def getline(self): |
||||
"""Internal: return one line from the server, stripping CRLF. |
||||
Raise EOFError if the connection is closed.""" |
||||
- line = self.file.readline() |
||||
+ line = self.file.readline(_MAXLINE + 1) |
||||
+ if len(line) > _MAXLINE: |
||||
+ raise NNTPDataError('line too long') |
||||
if self.debugging > 1: |
||||
print '*get*', repr(line) |
||||
if not line: raise EOFError |
||||
diff --git a/Lib/test/test_nntplib.py b/Lib/test/test_nntplib.py |
||||
new file mode 100644 |
||||
--- /dev/null |
||||
+++ b/Lib/test/test_nntplib.py |
||||
@@ -0,0 +1,65 @@ |
||||
+import socket |
||||
+import threading |
||||
+import nntplib |
||||
+import time |
||||
+ |
||||
+from unittest import TestCase |
||||
+from test import test_support |
||||
+ |
||||
+HOST = test_support.HOST |
||||
+ |
||||
+ |
||||
+def server(evt, serv, evil=False): |
||||
+ serv.listen(5) |
||||
+ try: |
||||
+ conn, addr = serv.accept() |
||||
+ except socket.timeout: |
||||
+ pass |
||||
+ else: |
||||
+ if evil: |
||||
+ conn.send("1 I'm too long response" * 3000 + "\n") |
||||
+ else: |
||||
+ conn.send("1 I'm OK response\n") |
||||
+ conn.close() |
||||
+ finally: |
||||
+ serv.close() |
||||
+ evt.set() |
||||
+ |
||||
+ |
||||
+class BaseServerTest(TestCase): |
||||
+ def setUp(self): |
||||
+ self.evt = threading.Event() |
||||
+ self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) |
||||
+ self.sock.settimeout(3) |
||||
+ self.port = test_support.bind_port(self.sock) |
||||
+ threading.Thread( |
||||
+ target=server, |
||||
+ args=(self.evt, self.sock, self.evil)).start() |
||||
+ time.sleep(.1) |
||||
+ |
||||
+ def tearDown(self): |
||||
+ self.evt.wait() |
||||
+ |
||||
+ |
||||
+class ServerTests(BaseServerTest): |
||||
+ evil = False |
||||
+ |
||||
+ def test_basic_connect(self): |
||||
+ nntp = nntplib.NNTP('localhost', self.port) |
||||
+ nntp.sock.close() |
||||
+ |
||||
+ |
||||
+class EvilServerTests(BaseServerTest): |
||||
+ evil = True |
||||
+ |
||||
+ def test_too_long_line(self): |
||||
+ self.assertRaises(nntplib.NNTPDataError, |
||||
+ nntplib.NNTP, 'localhost', self.port) |
||||
+ |
||||
+ |
||||
+def test_main(verbose=None): |
||||
+ test_support.run_unittest(EvilServerTests) |
||||
+ test_support.run_unittest(ServerTests) |
||||
+ |
||||
+if __name__ == '__main__': |
||||
+ test_main() |
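This nntplib change and the ftplib, httplib, poplib, smtplib and imaplib patches that follow all apply the same CVE-2013-1752 defence: give readline() a size hint and treat anything longer as a protocol error instead of buffering an unbounded line in memory. Reduced to its core, the pattern looks roughly like this (illustrative; bounded_getline is a made-up name and 2048 is the limit chosen for NNTP above):

    _MAXLINE = 2048

    def bounded_getline(fileobj):
        # Ask for at most _MAXLINE + 1 bytes; getting more than _MAXLINE back
        # means the peer sent an oversized line, so stop reading and fail.
        line = fileobj.readline(_MAXLINE + 1)
        if len(line) > _MAXLINE:
            raise ValueError('line too long (more than %d bytes)' % _MAXLINE)
        return line
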
@@ -0,0 +1,149 @@
|
||||
|
||||
# HG changeset patch |
||||
# User Serhiy Storchaka <storchaka@gmail.com> |
||||
# Date 1382277427 -10800 |
||||
# Node ID 44ac81e6d584758ee56a865a7c18d82505be0643 |
||||
# Parent 625ece68d79a27d376889579c414ed4b2d8a2649 |
||||
Issue #16038: CVE-2013-1752: ftplib: Limit amount of data read by |
||||
limiting the call to readline(). Original patch by Michał |
||||
Jastrzębski and Giampaolo Rodola. |
||||
|
||||
diff --git a/Lib/ftplib.py b/Lib/ftplib.py |
||||
--- a/Lib/ftplib.py |
||||
+++ b/Lib/ftplib.py |
||||
@@ -55,6 +55,8 @@ MSG_OOB = 0x1 |
||||
|
||||
# The standard FTP server control port |
||||
FTP_PORT = 21 |
||||
+# The sizehint parameter passed to readline() calls |
||||
+MAXLINE = 8192 |
||||
|
||||
|
||||
# Exception raised when an error or invalid response is received |
||||
@@ -101,6 +103,7 @@ class FTP: |
||||
debugging = 0 |
||||
host = '' |
||||
port = FTP_PORT |
||||
+ maxline = MAXLINE |
||||
sock = None |
||||
file = None |
||||
welcome = None |
||||
@@ -180,7 +183,9 @@ class FTP: |
||||
# Internal: return one line from the server, stripping CRLF. |
||||
# Raise EOFError if the connection is closed |
||||
def getline(self): |
||||
- line = self.file.readline() |
||||
+ line = self.file.readline(self.maxline + 1) |
||||
+ if len(line) > self.maxline: |
||||
+ raise Error("got more than %d bytes" % self.maxline) |
||||
if self.debugging > 1: |
||||
print '*get*', self.sanitize(line) |
||||
if not line: raise EOFError |
||||
@@ -432,7 +437,9 @@ class FTP: |
||||
conn = self.transfercmd(cmd) |
||||
fp = conn.makefile('rb') |
||||
while 1: |
||||
- line = fp.readline() |
||||
+ line = fp.readline(self.maxline + 1) |
||||
+ if len(line) > self.maxline: |
||||
+ raise Error("got more than %d bytes" % self.maxline) |
||||
if self.debugging > 2: print '*retr*', repr(line) |
||||
if not line: |
||||
break |
||||
@@ -485,7 +492,9 @@ class FTP: |
||||
self.voidcmd('TYPE A') |
||||
conn = self.transfercmd(cmd) |
||||
while 1: |
||||
- buf = fp.readline() |
||||
+ buf = fp.readline(self.maxline + 1) |
||||
+ if len(buf) > self.maxline: |
||||
+ raise Error("got more than %d bytes" % self.maxline) |
||||
if not buf: break |
||||
if buf[-2:] != CRLF: |
||||
if buf[-1] in CRLF: buf = buf[:-1] |
||||
@@ -710,7 +719,9 @@ else: |
||||
fp = conn.makefile('rb') |
||||
try: |
||||
while 1: |
||||
- line = fp.readline() |
||||
+ line = fp.readline(self.maxline + 1) |
||||
+ if len(line) > self.maxline: |
||||
+ raise Error("got more than %d bytes" % self.maxline) |
||||
if self.debugging > 2: print '*retr*', repr(line) |
||||
if not line: |
||||
break |
||||
@@ -748,7 +759,9 @@ else: |
||||
conn = self.transfercmd(cmd) |
||||
try: |
||||
while 1: |
||||
- buf = fp.readline() |
||||
+ buf = fp.readline(self.maxline + 1) |
||||
+ if len(buf) > self.maxline: |
||||
+ raise Error("got more than %d bytes" % self.maxline) |
||||
if not buf: break |
||||
if buf[-2:] != CRLF: |
||||
if buf[-1] in CRLF: buf = buf[:-1] |
||||
@@ -905,7 +918,9 @@ class Netrc: |
||||
fp = open(filename, "r") |
||||
in_macro = 0 |
||||
while 1: |
||||
- line = fp.readline() |
||||
+ line = fp.readline(self.maxline + 1) |
||||
+ if len(line) > self.maxline: |
||||
+ raise Error("got more than %d bytes" % self.maxline) |
||||
if not line: break |
||||
if in_macro and line.strip(): |
||||
macro_lines.append(line) |
||||
diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py |
||||
--- a/Lib/test/test_ftplib.py |
||||
+++ b/Lib/test/test_ftplib.py |
||||
@@ -65,6 +65,7 @@ class DummyFTPHandler(asynchat.async_cha |
||||
self.last_received_data = '' |
||||
self.next_response = '' |
||||
self.rest = None |
||||
+ self.next_retr_data = RETR_DATA |
||||
self.push('220 welcome') |
||||
|
||||
def collect_incoming_data(self, data): |
||||
@@ -189,7 +190,7 @@ class DummyFTPHandler(asynchat.async_cha |
||||
offset = int(self.rest) |
||||
else: |
||||
offset = 0 |
||||
- self.dtp.push(RETR_DATA[offset:]) |
||||
+ self.dtp.push(self.next_retr_data[offset:]) |
||||
self.dtp.close_when_done() |
||||
self.rest = None |
||||
|
||||
@@ -203,6 +204,11 @@ class DummyFTPHandler(asynchat.async_cha |
||||
self.dtp.push(NLST_DATA) |
||||
self.dtp.close_when_done() |
||||
|
||||
+ def cmd_setlongretr(self, arg): |
||||
+ # For testing. Next RETR will return long line. |
||||
+ self.next_retr_data = 'x' * int(arg) |
||||
+ self.push('125 setlongretr ok') |
||||
+ |
||||
|
||||
class DummyFTPServer(asyncore.dispatcher, threading.Thread): |
||||
|
||||
@@ -558,6 +564,20 @@ class TestFTPClass(TestCase): |
||||
# IPv4 is in use, just make sure send_epsv has not been used |
||||
self.assertEqual(self.server.handler.last_received_cmd, 'pasv') |
||||
|
||||
+ def test_line_too_long(self): |
||||
+ self.assertRaises(ftplib.Error, self.client.sendcmd, |
||||
+ 'x' * self.client.maxline * 2) |
||||
+ |
||||
+ def test_retrlines_too_long(self): |
||||
+ self.client.sendcmd('SETLONGRETR %d' % (self.client.maxline * 2)) |
||||
+ received = [] |
||||
+ self.assertRaises(ftplib.Error, |
||||
+ self.client.retrlines, 'retr', received.append) |
||||
+ |
||||
+ def test_storlines_too_long(self): |
||||
+ f = StringIO.StringIO('x' * self.client.maxline * 2) |
||||
+ self.assertRaises(ftplib.Error, self.client.storlines, 'stor', f) |
||||
+ |
||||
|
||||
class TestIPv6Environment(TestCase): |
||||
|
@@ -0,0 +1,51 @@
|
||||
|
||||
# HG changeset patch |
||||
# User Berker Peksag <berker.peksag@gmail.com> |
||||
# Date 1407212157 -10800 |
||||
# Node ID 5e310c6a8520603bca8bc4b40eaf4f074db47c0d |
||||
# Parent 46c7a724b487295257423a69478392cb01ce74e6 |
||||
Issue #16037: HTTPMessage.readheaders() raises an HTTPException when more |
||||
than 100 headers are read. |
||||
|
||||
Patch by Jyrki Pulliainen and Daniel Eriksson. |
||||
|
||||
diff --git a/Lib/httplib.py b/Lib/httplib.py |
||||
--- a/Lib/httplib.py |
||||
+++ b/Lib/httplib.py |
||||
@@ -215,6 +215,10 @@ MAXAMOUNT = 1048576 |
||||
# maximal line length when calling readline(). |
||||
_MAXLINE = 65536 |
||||
|
||||
+# maximum amount of headers accepted |
||||
+_MAXHEADERS = 100 |
||||
+ |
||||
+ |
||||
class HTTPMessage(mimetools.Message): |
||||
|
||||
def addheader(self, key, value): |
||||
@@ -271,6 +275,8 @@ class HTTPMessage(mimetools.Message): |
||||
elif self.seekable: |
||||
tell = self.fp.tell |
||||
while True: |
||||
+ if len(hlist) > _MAXHEADERS: |
||||
+ raise HTTPException("got more than %d headers" % _MAXHEADERS) |
||||
if tell: |
||||
try: |
||||
startofline = tell() |
||||
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py |
||||
--- a/Lib/test/test_httplib.py |
||||
+++ b/Lib/test/test_httplib.py |
||||
@@ -262,6 +262,13 @@ class BasicTest(TestCase): |
||||
if resp.read() != "": |
||||
self.fail("Did not expect response from HEAD request") |
||||
|
||||
+ def test_too_many_headers(self): |
||||
+ headers = '\r\n'.join('Header%d: foo' % i for i in xrange(200)) + '\r\n' |
||||
+ text = ('HTTP/1.1 200 OK\r\n' + headers) |
||||
+ s = FakeSocket(text) |
||||
+ r = httplib.HTTPResponse(s) |
||||
+ self.assertRaises(httplib.HTTPException, r.begin) |
||||
+ |
||||
def test_send_file(self): |
||||
expected = 'GET /foo HTTP/1.1\r\nHost: example.com\r\n' \ |
||||
'Accept-Encoding: identity\r\nContent-Length:' |
@@ -0,0 +1,60 @@
|
||||
|
||||
# HG changeset patch |
||||
# User Benjamin Peterson <benjamin@python.org> |
||||
# Date 1417827758 18000 |
||||
# Node ID 339f877cca115c1901f5dd93d7bc066031d2a669 |
||||
# Parent 54af094087953f4997a4ead63e949d845c4b4412 |
||||
in poplib, limit maximum line length that we read from the network (closes #16041) |
||||
|
||||
Patch from Berker Peksag. |
||||
|
||||
diff --git a/Lib/poplib.py b/Lib/poplib.py |
||||
--- a/Lib/poplib.py |
||||
+++ b/Lib/poplib.py |
||||
@@ -32,6 +32,12 @@ CR = '\r' |
||||
LF = '\n' |
||||
CRLF = CR+LF |
||||
|
||||
+# maximal line length when calling readline(). This is to prevent |
||||
+# reading arbitrary length lines. RFC 1939 limits POP3 line length to |
||||
+# 512 characters, including CRLF. We have selected 2048 just to be on |
||||
+# the safe side. |
||||
+_MAXLINE = 2048 |
||||
+ |
||||
|
||||
class POP3: |
||||
|
||||
@@ -103,7 +109,9 @@ class POP3: |
||||
# Raise error_proto('-ERR EOF') if the connection is closed. |
||||
|
||||
def _getline(self): |
||||
- line = self.file.readline() |
||||
+ line = self.file.readline(_MAXLINE + 1) |
||||
+ if len(line) > _MAXLINE: |
||||
+ raise error_proto('line too long') |
||||
if self._debugging > 1: print '*get*', repr(line) |
||||
if not line: raise error_proto('-ERR EOF') |
||||
octets = len(line) |
||||
@@ -365,6 +373,8 @@ else: |
||||
match = renewline.match(self.buffer) |
||||
while not match: |
||||
self._fillBuffer() |
||||
+ if len(self.buffer) > _MAXLINE: |
||||
+ raise error_proto('line too long') |
||||
match = renewline.match(self.buffer) |
||||
line = match.group(0) |
||||
self.buffer = renewline.sub('' ,self.buffer, 1) |
||||
diff --git a/Lib/test/test_poplib.py b/Lib/test/test_poplib.py |
||||
--- a/Lib/test/test_poplib.py |
||||
+++ b/Lib/test/test_poplib.py |
||||
@@ -198,6 +198,10 @@ class TestPOP3Class(TestCase): |
||||
113) |
||||
self.assertEqual(self.client.retr('foo'), expected) |
||||
|
||||
+ def test_too_long_lines(self): |
||||
+ self.assertRaises(poplib.error_proto, self.client._shortcmd, |
||||
+ 'echo +%s' % ((poplib._MAXLINE + 10) * 'a')) |
||||
+ |
||||
def test_dele(self): |
||||
self.assertOK(self.client.dele('foo')) |
||||
|
@@ -0,0 +1,100 @@
|
||||
|
||||
# HG changeset patch |
||||
# User Benjamin Peterson <benjamin@python.org> |
||||
# Date 1417827918 18000 |
||||
# Node ID 923aac88a3cc76a95d5a04d9d3ece245147a8064 |
||||
# Parent 339f877cca115c1901f5dd93d7bc066031d2a669 |
||||
smtplib: limit amount read from the network (closes #16042) |
||||
|
||||
diff --git a/Lib/smtplib.py b/Lib/smtplib.py |
||||
--- a/Lib/smtplib.py |
||||
+++ b/Lib/smtplib.py |
||||
@@ -57,6 +57,7 @@ from sys import stderr |
||||
SMTP_PORT = 25 |
||||
SMTP_SSL_PORT = 465 |
||||
CRLF = "\r\n" |
||||
+_MAXLINE = 8192 # more than 8 times larger than RFC 821, 4.5.3 |
||||
|
||||
OLDSTYLE_AUTH = re.compile(r"auth=(.*)", re.I) |
||||
|
||||
@@ -179,10 +180,14 @@ else: |
||||
def __init__(self, sslobj): |
||||
self.sslobj = sslobj |
||||
|
||||
- def readline(self): |
||||
+ def readline(self, size=-1): |
||||
+ if size < 0: |
||||
+ size = None |
||||
str = "" |
||||
chr = None |
||||
while chr != "\n": |
||||
+ if size is not None and len(str) >= size: |
||||
+ break |
||||
chr = self.sslobj.read(1) |
||||
if not chr: |
||||
break |
||||
@@ -353,7 +358,7 @@ class SMTP: |
||||
self.file = self.sock.makefile('rb') |
||||
while 1: |
||||
try: |
||||
- line = self.file.readline() |
||||
+ line = self.file.readline(_MAXLINE + 1) |
||||
except socket.error as e: |
||||
self.close() |
||||
raise SMTPServerDisconnected("Connection unexpectedly closed: " |
||||
@@ -363,6 +368,8 @@ class SMTP: |
||||
raise SMTPServerDisconnected("Connection unexpectedly closed") |
||||
if self.debuglevel > 0: |
||||
print>>stderr, 'reply:', repr(line) |
||||
+ if len(line) > _MAXLINE: |
||||
+ raise SMTPResponseException(500, "Line too long.") |
||||
resp.append(line[4:].strip()) |
||||
code = line[:3] |
||||
# Check that the error code is syntactically correct. |
||||
diff --git a/Lib/test/test_smtplib.py b/Lib/test/test_smtplib.py |
||||
--- a/Lib/test/test_smtplib.py |
||||
+++ b/Lib/test/test_smtplib.py |
||||
@@ -292,6 +292,33 @@ class BadHELOServerTests(unittest.TestCa |
||||
HOST, self.port, 'localhost', 3) |
||||
|
||||
|
||||
+@unittest.skipUnless(threading, 'Threading required for this test.') |
||||
+class TooLongLineTests(unittest.TestCase): |
||||
+ respdata = '250 OK' + ('.' * smtplib._MAXLINE * 2) + '\n' |
||||
+ |
||||
+ def setUp(self): |
||||
+ self.old_stdout = sys.stdout |
||||
+ self.output = StringIO.StringIO() |
||||
+ sys.stdout = self.output |
||||
+ |
||||
+ self.evt = threading.Event() |
||||
+ self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) |
||||
+ self.sock.settimeout(15) |
||||
+ self.port = test_support.bind_port(self.sock) |
||||
+ servargs = (self.evt, self.respdata, self.sock) |
||||
+ threading.Thread(target=server, args=servargs).start() |
||||
+ self.evt.wait() |
||||
+ self.evt.clear() |
||||
+ |
||||
+ def tearDown(self): |
||||
+ self.evt.wait() |
||||
+ sys.stdout = self.old_stdout |
||||
+ |
||||
+ def testLineTooLong(self): |
||||
+ self.assertRaises(smtplib.SMTPResponseException, smtplib.SMTP, |
||||
+ HOST, self.port, 'localhost', 3) |
||||
+ |
||||
+ |
||||
sim_users = {'Mr.A@somewhere.com':'John A', |
||||
'Ms.B@somewhere.com':'Sally B', |
||||
'Mrs.C@somewhereesle.com':'Ruth C', |
||||
@@ -526,7 +553,8 @@ class SMTPSimTests(unittest.TestCase): |
||||
def test_main(verbose=None): |
||||
test_support.run_unittest(GeneralTests, DebuggingServerTests, |
||||
NonConnectingTests, |
||||
- BadHELOServerTests, SMTPSimTests) |
||||
+ BadHELOServerTests, SMTPSimTests, |
||||
+ TooLongLineTests) |
||||
|
||||
if __name__ == '__main__': |
||||
test_main() |
@@ -0,0 +1,59 @@
|
||||
|
||||
# HG changeset patch |
||||
# User R David Murray <rdmurray@bitdance.com> |
||||
# Date 1388775562 18000 |
||||
# Node ID dd906f4ab9237020a7a275c2d361fa288e553481 |
||||
# Parent 69b5f692455306c98aa27ecea17e6290787ebd3f |
||||
closes 16039: CVE-2013-1752: limit line length in imaplib readline calls. |
||||
|
||||
diff --git a/Lib/imaplib.py b/Lib/imaplib.py |
||||
--- a/Lib/imaplib.py |
||||
+++ b/Lib/imaplib.py |
||||
@@ -35,6 +35,15 @@ IMAP4_PORT = 143 |
||||
IMAP4_SSL_PORT = 993 |
||||
AllowedVersions = ('IMAP4REV1', 'IMAP4') # Most recent first |
||||
|
||||
+# Maximal line length when calling readline(). This is to prevent |
||||
+# reading arbitrary length lines. RFC 3501 and 2060 (IMAP 4rev1) |
||||
+# don't specify a line length. RFC 2683 however suggests limiting client |
||||
+# command lines to 1000 octets and server command lines to 8000 octets. |
||||
+# We have selected 10000 for some extra margin and since that is supposedly |
||||
+# also what UW and Panda IMAP does. |
||||
+_MAXLINE = 10000 |
||||
+ |
||||
+ |
||||
# Commands |
||||
|
||||
Commands = { |
||||
@@ -237,7 +246,10 @@ class IMAP4: |
||||
|
||||
def readline(self): |
||||
"""Read line from remote.""" |
||||
- return self.file.readline() |
||||
+ line = self.file.readline(_MAXLINE + 1) |
||||
+ if len(line) > _MAXLINE: |
||||
+ raise self.error("got more than %d bytes" % _MAXLINE) |
||||
+ return line |
||||
|
||||
|
||||
def send(self, data): |
||||
diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py |
||||
--- a/Lib/test/test_imaplib.py |
||||
+++ b/Lib/test/test_imaplib.py |
||||
@@ -165,6 +165,16 @@ class BaseThreadedNetworkedTests(unittes |
||||
self.imap_class, *server.server_address) |
||||
|
||||
|
||||
+ def test_linetoolong(self): |
||||
+ class TooLongHandler(SimpleIMAPHandler): |
||||
+ def handle(self): |
||||
+ # Send a very long response line |
||||
+ self.wfile.write('* OK ' + imaplib._MAXLINE*'x' + '\r\n') |
||||
+ |
||||
+ with self.reaped_server(TooLongHandler) as server: |
||||
+ self.assertRaises(imaplib.IMAP4.error, |
||||
+ self.imap_class, *server.server_address) |
||||
+ |
||||
class ThreadedNetworkedTests(BaseThreadedNetworkedTests): |
||||
|
||||
server_class = SocketServer.TCPServer |
@@ -0,0 +1,80 @@
|
||||
diff --git a/Lib/sre_compile.py b/Lib/sre_compile.py |
||||
index 7cda2b6..15d2324 100644 |
||||
--- a/Lib/sre_compile.py |
||||
+++ b/Lib/sre_compile.py |
||||
@@ -355,8 +355,6 @@ def _optimize_unicode(charset, fixup): |
||||
def _simple(av): |
||||
# check if av is a "simple" operator |
||||
lo, hi = av[2].getwidth() |
||||
- if lo == 0 and hi == MAXREPEAT: |
||||
- raise error, "nothing to repeat" |
||||
return lo == hi == 1 and av[2][0][0] != SUBPATTERN |
||||
|
||||
def _compile_info(code, pattern, flags): |
||||
diff --git a/Lib/sre_parse.py b/Lib/sre_parse.py |
||||
index 75f8c96..644441d 100644 |
||||
--- a/Lib/sre_parse.py |
||||
+++ b/Lib/sre_parse.py |
||||
@@ -147,7 +147,7 @@ class SubPattern: |
||||
REPEATCODES = (MIN_REPEAT, MAX_REPEAT) |
||||
for op, av in self.data: |
||||
if op is BRANCH: |
||||
- i = sys.maxint |
||||
+ i = MAXREPEAT - 1 |
||||
j = 0 |
||||
for av in av[1]: |
||||
l, h = av.getwidth() |
||||
@@ -165,14 +165,14 @@ class SubPattern: |
||||
hi = hi + j |
||||
elif op in REPEATCODES: |
||||
i, j = av[2].getwidth() |
||||
- lo = lo + long(i) * av[0] |
||||
- hi = hi + long(j) * av[1] |
||||
+ lo = lo + i * av[0] |
||||
+ hi = hi + j * av[1] |
||||
elif op in UNITCODES: |
||||
lo = lo + 1 |
||||
hi = hi + 1 |
||||
elif op == SUCCESS: |
||||
break |
||||
- self.width = int(min(lo, sys.maxint)), int(min(hi, sys.maxint)) |
||||
+ self.width = min(lo, MAXREPEAT - 1), min(hi, MAXREPEAT) |
||||
return self.width |
||||
|
||||
class Tokenizer: |
||||
diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py |
||||
index 18a81a2..f0827d8 100644 |
||||
--- a/Lib/test/test_re.py |
||||
+++ b/Lib/test/test_re.py |
||||
@@ -897,6 +897,17 @@ class ReTests(unittest.TestCase): |
||||
with self.assertRaisesRegexp(sre_constants.error, '\?foo'): |
||||
re.compile('(?P<?foo>)') |
||||
|
||||
+ def test_bug_2537(self): |
||||
+ # issue 2537: empty submatches |
||||
+ for outer_op in ('{0,}', '*', '+', '{1,187}'): |
||||
+ for inner_op in ('{0,}', '*', '?'): |
||||
+ r = re.compile("^((x|y)%s)%s" % (inner_op, outer_op)) |
||||
+ m = r.match("xyyzy") |
||||
+ self.assertEqual(m.group(0), "xyy") |
||||
+ self.assertEqual(m.group(1), "") |
||||
+ self.assertEqual(m.group(2), "y") |
||||
+ |
||||
+ |
||||
|
||||
def run_re_tests(): |
||||
from test.re_tests import tests, SUCCEED, FAIL, SYNTAX_ERROR |
||||
diff --git a/Lib/doctest.py b/Lib/doctest.py |
||||
index 90bcca1..0ee40a2 100644 |
||||
--- a/Lib/doctest.py |
||||
+++ b/Lib/doctest.py |
||||
@@ -564,7 +564,7 @@ class DocTestParser: |
||||
# Want consists of any non-blank lines that do not start with PS1. |
||||
(?P<want> (?:(?![ ]*$) # Not a blank line |
||||
(?![ ]*>>>) # Not a line starting with PS1 |
||||
- .*$\n? # But any other line |
||||
+ .+$\n? # But any other line |
||||
)*) |
||||
''', re.MULTILINE | re.VERBOSE) |
||||
|
||||
|
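The sre change swaps sys.maxint for MAXREPEAT when computing pattern widths, so nested repeats of possibly-empty groups (issue #2537) compile instead of tripping the old "nothing to repeat" check; the doctest hunk adjusts its internal regex accordingly. A quick look at the clamped width and at the previously failing pattern (illustrative; sre_parse and sre_constants are implementation modules):

    import re
    import sre_parse
    from sre_constants import MAXREPEAT

    # The upper bound is clamped to MAXREPEAT rather than sys.maxint.
    print sre_parse.parse('((x|y)*)*').getwidth(), MAXREPEAT

    # Compiles and matches on a patched interpreter; empty submatches allowed.
    print re.match('^((x|y)*)*', 'xyyzy').group(0)    # 'xyy'
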
@@ -0,0 +1,54 @@
|
||||
diff --git a/Include/pyexpat.h b/Include/pyexpat.h |
||||
index 5340ef5..3fc5fa5 100644 |
||||
--- a/Include/pyexpat.h |
||||
+++ b/Include/pyexpat.h |
||||
@@ -3,7 +3,7 @@ |
||||
|
||||
/* note: you must import expat.h before importing this module! */ |
||||
|
||||
-#define PyExpat_CAPI_MAGIC "pyexpat.expat_CAPI 1.0" |
||||
+#define PyExpat_CAPI_MAGIC "pyexpat.expat_CAPI 1.1" |
||||
#define PyExpat_CAPSULE_NAME "pyexpat.expat_CAPI" |
||||
|
||||
struct PyExpat_CAPI |
||||
@@ -43,6 +43,8 @@ struct PyExpat_CAPI |
||||
XML_Parser parser, XML_UnknownEncodingHandler handler, |
||||
void *encodingHandlerData); |
||||
void (*SetUserData)(XML_Parser parser, void *userData); |
||||
+ /* might be none for expat < 2.1.0 */ |
||||
+ int (*SetHashSalt)(XML_Parser parser, unsigned long hash_salt); |
||||
/* always add new stuff to the end! */ |
||||
}; |
||||
|
||||
diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c |
||||
index 379aa01..ce62081 100644 |
||||
--- a/Modules/_elementtree.c |
||||
+++ b/Modules/_elementtree.c |
||||
@@ -2500,6 +2500,11 @@ xmlparser(PyObject* self_, PyObject* args, PyObject* kw) |
||||
PyErr_NoMemory(); |
||||
return NULL; |
||||
} |
||||
+ /* expat < 2.1.0 has no XML_SetHashSalt() */ |
||||
+ if (EXPAT(SetHashSalt) != NULL) { |
||||
+ EXPAT(SetHashSalt)(self->parser, |
||||
+ (unsigned long)_Py_HashSecret.prefix); |
||||
+ } |
||||
|
||||
/* setup target handlers */ |
||||
if (!target) { |
||||
diff --git a/Modules/pyexpat.c b/Modules/pyexpat.c |
||||
index f269113..147b8a9 100644 |
||||
--- a/Modules/pyexpat.c |
||||
+++ b/Modules/pyexpat.c |
||||
@@ -2037,6 +2037,11 @@ MODULE_INITFUNC(void) |
||||
capi.SetProcessingInstructionHandler = XML_SetProcessingInstructionHandler; |
||||
capi.SetUnknownEncodingHandler = XML_SetUnknownEncodingHandler; |
||||
capi.SetUserData = XML_SetUserData; |
||||
+#if XML_COMBINED_VERSION >= 20100 |
||||
+ capi.SetHashSalt = XML_SetHashSalt; |
||||
+#else |
||||
+ capi.SetHashSalt = NULL; |
||||
+#endif |
||||
|
||||
/* export using capsule */ |
||||
capi_object = PyCapsule_New(&capi, PyExpat_CAPSULE_NAME, NULL); |
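Bumping the capsule magic and adding a SetHashSalt slot lets _elementtree seed expat's internal hash tables with Python's own hash secret, extending the hash-randomization DoS fix to C-accelerated ElementTree parsing; the slot stays NULL when Python is built against expat older than 2.1.0. From Python code the only visible knob is the linked expat version (illustrative):

    import pyexpat

    print pyexpat.EXPAT_VERSION    # e.g. 'expat_2.1.0'
    print pyexpat.version_info     # (2, 1, 0) or newer means XML_SetHashSalt is available
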
@@ -0,0 +1,56 @@
|
||||
commit bcd39b7b9bd3a7f8a6a34410169794a6264a6fed |
||||
Author: Victor Stinner <vstinner@redhat.com> |
||||
Date: Wed Nov 7 00:45:13 2018 +0100 |
||||
|
||||
bpo-25083: Python can sometimes create incorrect .pyc files |
||||
|
||||
Python 2 never checked for I/O error when reading .py files and |
||||
thus could mistake an I/O error for EOF and create incorrect .pyc |
||||
files. This adds an check for this and aborts on an error. |
||||
|
||||
Patch by tzickel, commit f64c813de84011a84ca21d75a294861a9cc2dfdc. |
||||
|
||||
Resolves: rhbz#1629982 |
||||
|
||||
diff --git a/Include/errcode.h b/Include/errcode.h |
||||
index becec80..5c5a0f7 100644 |
||||
--- a/Include/errcode.h |
||||
+++ b/Include/errcode.h |
||||
@@ -29,6 +29,7 @@ extern "C" { |
||||
#define E_EOFS 23 /* EOF in triple-quoted string */ |
||||
#define E_EOLS 24 /* EOL in single-quoted string */ |
||||
#define E_LINECONT 25 /* Unexpected characters after a line continuation */ |
||||
+#define E_IO 26 /* I/O error */ |
||||
|
||||
#ifdef __cplusplus |
||||
} |
||||
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c |
||||
index ee6313b..0217f2b 100644 |
||||
--- a/Parser/tokenizer.c |
||||
+++ b/Parser/tokenizer.c |
||||
@@ -1644,6 +1644,11 @@ int |
||||
PyTokenizer_Get(struct tok_state *tok, char **p_start, char **p_end) |
||||
{ |
||||
int result = tok_get(tok, p_start, p_end); |
||||
+ if (tok->fp && ferror(tok->fp)) { |
||||
+ clearerr(tok->fp); |
||||
+ result = ERRORTOKEN; |
||||
+ tok->done = E_IO; |
||||
+ } |
||||
if (tok->decoding_erred) { |
||||
result = ERRORTOKEN; |
||||
tok->done = E_DECODE; |
||||
diff --git a/Python/pythonrun.c b/Python/pythonrun.c |
||||
index 0b73f3a..9f06236 100644 |
||||
--- a/Python/pythonrun.c |
||||
+++ b/Python/pythonrun.c |
||||
@@ -1643,6 +1643,9 @@ err_input(perrdetail *err) |
||||
Py_XDECREF(tb); |
||||
break; |
||||
} |
||||
+ case E_IO: |
||||
+ msg = "I/O error while reading"; |
||||
+ break; |
||||
case E_LINECONT: |
||||
msg = "unexpected character after line continuation character"; |
||||
break; |
@@ -0,0 +1,100 @@
|
||||
commit 88a31ffeccce13192a474f4981b9cf6cfdfe065e |
||||
Author: Victor Stinner <vstinner@redhat.com> |
||||
Date: Wed Mar 20 17:43:20 2019 +0100 |
||||
|
||||
bpo-35746: Fix segfault in ssl's cert parser (GH-11569) |
||||
|
||||
Fix a NULL pointer deref in ssl module. The cert parser did not handle CRL |
||||
distribution points with empty DP or URI correctly. A malicious or buggy |
||||
certificate can result into segfault. |
||||
|
||||
Signed-off-by: Christian Heimes <christian@python.org> |
||||
|
||||
https://bugs.python.org/issue35746 |
||||
(cherry picked from commit a37f52436f9aa4b9292878b72f3ff1480e2606c3) |
||||
|
||||
Co-authored-by: Christian Heimes <christian@python.org> |
||||
|
||||
diff --git a/Lib/test/talos-2019-0758.pem b/Lib/test/talos-2019-0758.pem |
||||
new file mode 100644 |
||||
index 0000000..13b95a7 |
||||
--- /dev/null |
||||
+++ b/Lib/test/talos-2019-0758.pem |
||||
@@ -0,0 +1,22 @@ |
||||
+-----BEGIN CERTIFICATE----- |
||||
+MIIDqDCCApKgAwIBAgIBAjALBgkqhkiG9w0BAQswHzELMAkGA1UEBhMCVUsxEDAO |
||||
+BgNVBAMTB2NvZHktY2EwHhcNMTgwNjE4MTgwMDU4WhcNMjgwNjE0MTgwMDU4WjA7 |
||||
+MQswCQYDVQQGEwJVSzEsMCoGA1UEAxMjY29kZW5vbWljb24tdm0tMi50ZXN0Lmxh |
||||
+bC5jaXNjby5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC63fGB |
||||
+J80A9Av1GB0bptslKRIUtJm8EeEu34HkDWbL6AJY0P8WfDtlXjlPaLqFa6sqH6ES |
||||
+V48prSm1ZUbDSVL8R6BYVYpOlK8/48xk4pGTgRzv69gf5SGtQLwHy8UPBKgjSZoD |
||||
+5a5k5wJXGswhKFFNqyyxqCvWmMnJWxXTt2XDCiWc4g4YAWi4O4+6SeeHVAV9rV7C |
||||
+1wxqjzKovVe2uZOHjKEzJbbIU6JBPb6TRfMdRdYOw98n1VXDcKVgdX2DuuqjCzHP |
||||
+WhU4Tw050M9NaK3eXp4Mh69VuiKoBGOLSOcS8reqHIU46Reg0hqeL8LIL6OhFHIF |
||||
+j7HR6V1X6F+BfRS/AgMBAAGjgdYwgdMwCQYDVR0TBAIwADAdBgNVHQ4EFgQUOktp |
||||
+HQjxDXXUg8prleY9jeLKeQ4wTwYDVR0jBEgwRoAUx6zgPygZ0ZErF9sPC4+5e2Io |
||||
+UU+hI6QhMB8xCzAJBgNVBAYTAlVLMRAwDgYDVQQDEwdjb2R5LWNhggkA1QEAuwb7 |
||||
+2s0wCQYDVR0SBAIwADAuBgNVHREEJzAlgiNjb2Rlbm9taWNvbi12bS0yLnRlc3Qu |
||||
+bGFsLmNpc2NvLmNvbTAOBgNVHQ8BAf8EBAMCBaAwCwYDVR0fBAQwAjAAMAsGCSqG |
||||
+SIb3DQEBCwOCAQEAvqantx2yBlM11RoFiCfi+AfSblXPdrIrHvccepV4pYc/yO6p |
||||
+t1f2dxHQb8rWH3i6cWag/EgIZx+HJQvo0rgPY1BFJsX1WnYf1/znZpkUBGbVmlJr |
||||
+t/dW1gSkNS6sPsM0Q+7HPgEv8CPDNK5eo7vU2seE0iWOkxSyVUuiCEY9ZVGaLVit |
||||
+p0C78nZ35Pdv4I+1cosmHl28+es1WI22rrnmdBpH8J1eY6WvUw2xuZHLeNVN0TzV |
||||
+Q3qq53AaCWuLOD1AjESWuUCxMZTK9DPS4JKXTK8RLyDeqOvJGjsSWp3kL0y3GaQ+ |
||||
+10T1rfkKJub2+m9A9duin1fn6tHc2wSvB7m3DA== |
||||
+-----END CERTIFICATE----- |
||||
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py |
||||
index f7a6746..31af578 100644 |
||||
--- a/Lib/test/test_ssl.py |
||||
+++ b/Lib/test/test_ssl.py |
||||
@@ -68,6 +68,7 @@ WRONGCERT = data_file("XXXnonexisting.pem") |
||||
BADKEY = data_file("badkey.pem") |
||||
NOKIACERT = data_file("nokia.pem") |
||||
NULLBYTECERT = data_file("nullbytecert.pem") |
||||
+TALOS_INVALID_CRLDP = data_file("talos-2019-0758.pem") |
||||
|
||||
DHFILE = data_file("dh1024.pem") |
||||
BYTES_DHFILE = DHFILE.encode(sys.getfilesystemencoding()) |
||||
@@ -238,6 +239,27 @@ class BasicSocketTests(unittest.TestCase): |
||||
('IP Address', '2001:DB8:0:0:0:0:0:1\n')) |
||||
) |
||||
|
||||
+ def test_parse_cert_CVE_2019_5010(self): |
||||
+ p = ssl._ssl._test_decode_cert(TALOS_INVALID_CRLDP) |
||||
+ if support.verbose: |
||||
+ sys.stdout.write("\n" + pprint.pformat(p) + "\n") |
||||
+ self.assertEqual( |
||||
+ p, |
||||
+ { |
||||
+ 'issuer': ( |
||||
+ (('countryName', 'UK'),), (('commonName', 'cody-ca'),)), |
||||
+ 'notAfter': 'Jun 14 18:00:58 2028 GMT', |
||||
+ 'notBefore': 'Jun 18 18:00:58 2018 GMT', |
||||
+ 'serialNumber': '02', |
||||
+ 'subject': ((('countryName', 'UK'),), |
||||
+ (('commonName', |
||||
+ 'codenomicon-vm-2.test.lal.cisco.com'),)), |
||||
+ 'subjectAltName': ( |
||||
+ ('DNS', 'codenomicon-vm-2.test.lal.cisco.com'),), |
||||
+ 'version': 3 |
||||
+ } |
||||
+ ) |
||||
+ |
||||
def test_parse_all_sans(self): |
||||
p = ssl._ssl._test_decode_cert(ALLSANFILE) |
||||
self.assertEqual(p['subjectAltName'], |
||||
diff --git a/Modules/_ssl.c b/Modules/_ssl.c |
||||
index 6220bea..baea6e1 100644 |
||||
--- a/Modules/_ssl.c |
||||
+++ b/Modules/_ssl.c |
||||
@@ -1103,6 +1103,10 @@ _get_crl_dp(X509 *certificate) { |
||||
STACK_OF(GENERAL_NAME) *gns; |
||||
|
||||
dp = sk_DIST_POINT_value(dps, i); |
||||
+ if (dp->distpoint == NULL) { |
||||
+ /* Ignore empty DP value, CVE-2019-5010 */ |
||||
+ continue; |
||||
+ } |
||||
gns = dp->distpoint->name.fullname; |
||||
|
||||
for (j=0; j < sk_GENERAL_NAME_num(gns); j++) { |
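With the NULL check in place, a certificate whose CRL distribution points extension contains an empty DistributionPoint is skipped rather than dereferenced, so parsing returns normally instead of crashing the interpreter. The regression test drives this through the private _test_decode_cert helper, roughly as below (illustrative; talos-2019-0758.pem is the certificate added by the patch and must be present on disk):

    import pprint
    import ssl

    # On a patched build this returns a dict; unpatched builds crash here.
    cert = ssl._ssl._test_decode_cert('talos-2019-0758.pem')
    pprint.pprint(cert)
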
@@ -0,0 +1,156 @@
|
||||
diff --git a/Doc/library/urlparse.rst b/Doc/library/urlparse.rst |
||||
index efd112d..61022f7 100644 |
||||
--- a/Doc/library/urlparse.rst |
||||
+++ b/Doc/library/urlparse.rst |
||||
@@ -118,6 +118,12 @@ The :mod:`urlparse` module defines the following functions: |
||||
See section :ref:`urlparse-result-object` for more information on the result |
||||
object. |
||||
|
||||
+ Characters in the :attr:`netloc` attribute that decompose under NFKC |
||||
+ normalization (as used by the IDNA encoding) into any of ``/``, ``?``, |
||||
+ ``#``, ``@``, or ``:`` will raise a :exc:`ValueError`. If the URL is |
||||
+ decomposed before parsing, or is not a Unicode string, no error will be |
||||
+ raised. |
||||
+ |
||||
.. versionchanged:: 2.5 |
||||
Added attributes to return value. |
||||
|
||||
@@ -125,6 +131,11 @@ The :mod:`urlparse` module defines the following functions: |
||||
Added IPv6 URL parsing capabilities. |
||||
|
||||
|
||||
+ .. versionchanged:: 2.7.17 |
||||
+ Characters that affect netloc parsing under NFKC normalization will |
||||
+ now raise :exc:`ValueError`. |
||||
+ |
||||
+ |
||||
.. function:: parse_qs(qs[, keep_blank_values[, strict_parsing]]) |
||||
|
||||
Parse a query string given as a string argument (data of type |
||||
@@ -219,11 +230,21 @@ The :mod:`urlparse` module defines the following functions: |
||||
See section :ref:`urlparse-result-object` for more information on the result |
||||
object. |
||||
|
||||
+ Characters in the :attr:`netloc` attribute that decompose under NFKC |
||||
+ normalization (as used by the IDNA encoding) into any of ``/``, ``?``, |
||||
+ ``#``, ``@``, or ``:`` will raise a :exc:`ValueError`. If the URL is |
||||
+ decomposed before parsing, or is not a Unicode string, no error will be |
||||
+ raised. |
||||
+ |
||||
.. versionadded:: 2.2 |
||||
|
||||
.. versionchanged:: 2.5 |
||||
Added attributes to return value. |
||||
|
||||
+ .. versionchanged:: 2.7.17 |
||||
+ Characters that affect netloc parsing under NFKC normalization will |
||||
+ now raise :exc:`ValueError`. |
||||
+ |
||||
|
||||
.. function:: urlunsplit(parts) |
||||
|
||||
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py |
||||
index 72ebfaa..2717163 100644 |
||||
--- a/Lib/test/test_urlparse.py |
||||
+++ b/Lib/test/test_urlparse.py |
||||
@@ -1,6 +1,8 @@ |
||||
#! /usr/bin/env python |
||||
|
||||
from test import test_support |
||||
+import sys |
||||
+import unicodedata |
||||
import unittest |
||||
import urlparse |
||||
|
||||
@@ -564,6 +566,45 @@ class UrlParseTestCase(unittest.TestCase): |
||||
self.assertEqual(urlparse.urlparse("http://www.python.org:80"), |
||||
('http','www.python.org:80','','','','')) |
||||
|
||||
+ def test_urlsplit_normalization(self): |
||||
+ # Certain characters should never occur in the netloc, |
||||
+ # including under normalization. |
||||
+ # Ensure that ALL of them are detected and cause an error |
||||
+ illegal_chars = u'/:#?@' |
||||
+ hex_chars = {'{:04X}'.format(ord(c)) for c in illegal_chars} |
||||
+ denorm_chars = [ |
||||
+ c for c in map(unichr, range(128, sys.maxunicode)) |
||||
+ if (hex_chars & set(unicodedata.decomposition(c).split())) |
||||
+ and c not in illegal_chars |
||||
+ ] |
||||
+ # Sanity check that we found at least one such character |
||||
+ self.assertIn(u'\u2100', denorm_chars) |
||||
+ self.assertIn(u'\uFF03', denorm_chars) |
||||
+ |
||||
+ # bpo-36742: Verify port separators are ignored when they |
||||
+ # existed prior to decomposition |
||||
+ urlparse.urlsplit(u'http://\u30d5\u309a:80') |
||||
+ with self.assertRaises(ValueError): |
||||
+ urlparse.urlsplit(u'http://\u30d5\u309a\ufe1380') |
||||
+ |
||||
+ for scheme in [u"http", u"https", u"ftp"]: |
||||
+ for netloc in [u"netloc{}false.netloc", u"n{}user@netloc"]: |
||||
+ for c in denorm_chars: |
||||
+ url = u"{}://{}/path".format(scheme, netloc.format(c)) |
||||
+ if test_support.verbose: |
||||
+ print "Checking %r" % url |
||||
+ with self.assertRaises(ValueError): |
||||
+ urlparse.urlsplit(url) |
||||
+ |
||||
+ # check error message: invalid netloc must be formated with repr() |
||||
+ # to get an ASCII error message |
||||
+ with self.assertRaises(ValueError) as cm: |
||||
+ urlparse.urlsplit(u'http://example.com\uFF03@bing.com') |
||||
+ self.assertEqual(str(cm.exception), |
||||
+ "netloc u'example.com\\uff03@bing.com' contains invalid characters " |
||||
+ "under NFKC normalization") |
||||
+ self.assertIsInstance(cm.exception.args[0], str) |
||||
+ |
||||
def test_main(): |
||||
test_support.run_unittest(UrlParseTestCase) |
||||
|
||||
diff --git a/Lib/urlparse.py b/Lib/urlparse.py |
||||
index 4ce982e..9a1df74 100644 |
||||
--- a/Lib/urlparse.py |
||||
+++ b/Lib/urlparse.py |
||||
@@ -164,6 +164,25 @@ def _splitnetloc(url, start=0): |
||||
delim = min(delim, wdelim) # use earliest delim position |
||||
return url[start:delim], url[delim:] # return (domain, rest) |
||||
|
||||
+def _checknetloc(netloc): |
||||
+ if not netloc or not isinstance(netloc, unicode): |
||||
+ return |
||||
+ # looking for characters like \u2100 that expand to 'a/c' |
||||
+ # IDNA uses NFKC equivalence, so normalize for this check |
||||
+ import unicodedata |
||||
+ n = netloc.replace(u'@', u'') # ignore characters already included |
||||
+ n = n.replace(u':', u'') # but not the surrounding text |
||||
+ n = n.replace(u'#', u'') |
||||
+ n = n.replace(u'?', u'') |
||||
+ netloc2 = unicodedata.normalize('NFKC', n) |
||||
+ if n == netloc2: |
||||
+ return |
||||
+ for c in '/?#@:': |
||||
+ if c in netloc2: |
||||
+ raise ValueError("netloc %r contains invalid characters " |
||||
+ "under NFKC normalization" |
||||
+ % netloc) |
||||
+ |
||||
def urlsplit(url, scheme='', allow_fragments=True): |
||||
"""Parse a URL into 5 components: |
||||
<scheme>://<netloc>/<path>?<query>#<fragment> |
||||
@@ -192,6 +211,7 @@ def urlsplit(url, scheme='', allow_fragments=True): |
||||
url, fragment = url.split('#', 1) |
||||
if '?' in url: |
||||
url, query = url.split('?', 1) |
||||
+ _checknetloc(netloc) |
||||
v = SplitResult(scheme, netloc, url, query, fragment) |
||||
_parse_cache[key] = v |
||||
return v |
||||
@@ -215,6 +235,7 @@ def urlsplit(url, scheme='', allow_fragments=True): |
||||
url, fragment = url.split('#', 1) |
||||
if '?' in url: |
||||
url, query = url.split('?', 1) |
||||
+ _checknetloc(netloc) |
||||
v = SplitResult(scheme, netloc, url, query, fragment) |
||||
_parse_cache[key] = v |
||||
return v |
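With _checknetloc() wired into both urlsplit() code paths, a Unicode URL whose netloc only gains one of the separator characters /, ?, #, @ or : after NFKC normalization is rejected, closing the bpo-36216 host-spoofing vector while leaving already-normalized and byte-string URLs untouched. Usage sketch (illustrative):

    import urlparse

    print urlparse.urlsplit(u'http://example.com/path').netloc    # example.com
    try:
        # U+FF03 (FULLWIDTH NUMBER SIGN) normalizes to '#', altering the netloc.
        urlparse.urlsplit(u'http://example.com\uFF03@bing.com')
    except ValueError as exc:
        print exc
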
@@ -0,0 +1,222 @@
|
||||
diff --git a/Lib/httplib.py b/Lib/httplib.py |
||||
index da2f346..fc8e895 100644 |
||||
--- a/Lib/httplib.py |
||||
+++ b/Lib/httplib.py |
||||
@@ -247,6 +247,15 @@ _MAXHEADERS = 100 |
||||
_is_legal_header_name = re.compile(r'\A[^:\s][^:\r\n]*\Z').match |
||||
_is_illegal_header_value = re.compile(r'\n(?![ \t])|\r(?![ \t\n])').search |
||||
|
||||
+# These characters are not allowed within HTTP URL paths. |
||||
+# See https://tools.ietf.org/html/rfc3986#section-3.3 and the |
||||
+# https://tools.ietf.org/html/rfc3986#appendix-A pchar definition. |
||||
+# Prevents CVE-2019-9740. Includes control characters such as \r\n. |
||||
+# Restrict non-ASCII characters above \x7f (0x80-0xff). |
||||
+_contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f-\xff]') |
||||
+# Arguably only these _should_ allowed: |
||||
+# _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$") |
||||
+# We are more lenient for assumed real world compatibility purposes. |
||||
|
||||
class HTTPMessage(mimetools.Message): |
||||
|
||||
@@ -926,6 +935,12 @@ class HTTPConnection: |
||||
self._method = method |
||||
if not url: |
||||
url = '/' |
||||
+ # Prevent CVE-2019-9740. |
||||
+ match = _contains_disallowed_url_pchar_re.search(url) |
||||
+ if match: |
||||
+ raise InvalidURL("URL can't contain control characters. %r " |
||||
+ "(found at least %r)" |
||||
+ % (url, match.group())) |
||||
hdr = '%s %s %s' % (method, url, self._http_vsn_str) |
||||
|
||||
self._output(hdr) |
||||
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py |
||||
index 3845012..d2da0f8 100644 |
||||
--- a/Lib/test/test_urllib.py |
||||
+++ b/Lib/test/test_urllib.py |
||||
@@ -198,6 +198,31 @@ class urlopen_HttpTests(unittest.TestCase, FakeHTTPMixin): |
||||
finally: |
||||
self.unfakehttp() |
||||
|
||||
+ def test_url_with_control_char_rejected(self): |
||||
+ for char_no in range(0, 0x21) + range(0x7f, 0x100): |
||||
+ char = chr(char_no) |
||||
+ schemeless_url = "//localhost:7777/test%s/" % char |
||||
+ self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.") |
||||
+ try: |
||||
+ # urllib quotes the URL so there is no injection. |
||||
+ resp = urllib.urlopen("http:" + schemeless_url) |
||||
+ self.assertNotIn(char, resp.geturl()) |
||||
+ finally: |
||||
+ self.unfakehttp() |
||||
+ |
||||
+ def test_url_with_newline_header_injection_rejected(self): |
||||
+ self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.") |
||||
+ host = "localhost:7777?a=1 HTTP/1.1\r\nX-injected: header\r\nTEST: 123" |
||||
+ schemeless_url = "//" + host + ":8080/test/?test=a" |
||||
+ try: |
||||
+ # urllib quotes the URL so there is no injection. |
||||
+ resp = urllib.urlopen("http:" + schemeless_url) |
||||
+ self.assertNotIn(' ', resp.geturl()) |
||||
+ self.assertNotIn('\r', resp.geturl()) |
||||
+ self.assertNotIn('\n', resp.geturl()) |
||||
+ finally: |
||||
+ self.unfakehttp() |
||||
+ |
||||
def test_read_bogus(self): |
||||
# urlopen() should raise IOError for many error codes. |
||||
self.fakehttp('''HTTP/1.1 401 Authentication Required |
||||
@@ -786,6 +811,35 @@ class Pathname_Tests(unittest.TestCase): |
||||
class Utility_Tests(unittest.TestCase): |
||||
"""Testcase to test the various utility functions in the urllib.""" |
||||
|
||||
+ def test_splithost(self): |
||||
+ splithost = urllib.splithost |
||||
+ self.assertEqual(splithost('//www.example.org:80/foo/bar/baz.html'), |
||||
+ ('www.example.org:80', '/foo/bar/baz.html')) |
||||
+ self.assertEqual(splithost('//www.example.org:80'), |
||||
+ ('www.example.org:80', '')) |
||||
+ self.assertEqual(splithost('/foo/bar/baz.html'), |
||||
+ (None, '/foo/bar/baz.html')) |
||||
+ |
||||
+ # bpo-30500: # starts a fragment. |
||||
+ self.assertEqual(splithost('//127.0.0.1#@host.com'), |
||||
+ ('127.0.0.1', '/#@host.com')) |
||||
+ self.assertEqual(splithost('//127.0.0.1#@host.com:80'), |
||||
+ ('127.0.0.1', '/#@host.com:80')) |
||||
+ self.assertEqual(splithost('//127.0.0.1:80#@host.com'), |
||||
+ ('127.0.0.1:80', '/#@host.com')) |
||||
+ |
||||
+ # Empty host is returned as empty string. |
||||
+ self.assertEqual(splithost("///file"), |
||||
+ ('', '/file')) |
||||
+ |
||||
+ # Trailing semicolon, question mark and hash symbol are kept. |
||||
+ self.assertEqual(splithost("//example.net/file;"), |
||||
+ ('example.net', '/file;')) |
||||
+ self.assertEqual(splithost("//example.net/file?"), |
||||
+ ('example.net', '/file?')) |
||||
+ self.assertEqual(splithost("//example.net/file#"), |
||||
+ ('example.net', '/file#')) |
||||
+ |
||||
def test_splitpasswd(self): |
||||
"""Some of the password examples are not sensible, but it is added to |
||||
confirming to RFC2617 and addressing issue4675. |
||||
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py |
||||
index c317b8d..63fefd6 100644 |
||||
--- a/Lib/test/test_urllib2.py |
||||
+++ b/Lib/test/test_urllib2.py |
||||
@@ -7,12 +7,16 @@ import StringIO |
||||
|
||||
import urllib2 |
||||
from urllib2 import Request, OpenerDirector |
||||
+import httplib |
||||
|
||||
try: |
||||
import ssl |
||||
except ImportError: |
||||
ssl = None |
||||
|
||||
+from test.test_urllib import FakeHTTPMixin |
||||
+ |
||||
+ |
||||
# XXX |
||||
# Request |
||||
# CacheFTPHandler (hard to write) |
||||
@@ -1243,7 +1247,7 @@ class HandlerTests(unittest.TestCase): |
||||
self.assertEqual(len(http_handler.requests), 1) |
||||
self.assertFalse(http_handler.requests[0].has_header(auth_header)) |
||||
|
||||
-class MiscTests(unittest.TestCase): |
||||
+class MiscTests(unittest.TestCase, FakeHTTPMixin): |
||||
|
||||
def test_build_opener(self): |
||||
class MyHTTPHandler(urllib2.HTTPHandler): pass |
||||
@@ -1289,6 +1293,53 @@ class MiscTests(unittest.TestCase): |
||||
else: |
||||
self.assertTrue(False) |
||||
|
||||
+ @unittest.skipUnless(ssl, "ssl module required") |
||||
+ def test_url_with_control_char_rejected(self): |
||||
+ for char_no in range(0, 0x21) + range(0x7f, 0x100): |
||||
+ char = chr(char_no) |
||||
+ schemeless_url = "//localhost:7777/test%s/" % char |
||||
+ self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.") |
||||
+ try: |
||||
+ # We explicitly test urllib.request.urlopen() instead of the top |
||||
+ # level 'def urlopen()' function defined in this... (quite ugly) |
||||
+ # test suite. They use different url opening codepaths. Plain |
||||
+ # urlopen uses FancyURLOpener which goes via a codepath that |
||||
+ # calls urllib.parse.quote() on the URL which makes all of the |
||||
+ # above attempts at injection within the url _path_ safe. |
||||
+ escaped_char_repr = repr(char).replace('\\', r'\\') |
||||
+ InvalidURL = httplib.InvalidURL |
||||
+ with self.assertRaisesRegexp( |
||||
+ InvalidURL, "contain control.*" + escaped_char_repr): |
||||
+ urllib2.urlopen("http:" + schemeless_url) |
||||
+ with self.assertRaisesRegexp( |
||||
+ InvalidURL, "contain control.*" + escaped_char_repr): |
||||
+ urllib2.urlopen("https:" + schemeless_url) |
||||
+ finally: |
||||
+ self.unfakehttp() |
||||
+ |
||||
+ @unittest.skipUnless(ssl, "ssl module required") |
||||
+ def test_url_with_newline_header_injection_rejected(self): |
||||
+ self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.") |
||||
+ host = "localhost:7777?a=1 HTTP/1.1\r\nX-injected: header\r\nTEST: 123" |
||||
+ schemeless_url = "//" + host + ":8080/test/?test=a" |
||||
+ try: |
||||
+ # We explicitly test urllib2.urlopen() instead of the top |
||||
+ # level 'def urlopen()' function defined in this... (quite ugly) |
||||
+ # test suite. They use different url opening codepaths. Plain |
||||
+ # urlopen uses FancyURLOpener which goes via a codepath that |
||||
+ # calls urllib.parse.quote() on the URL which makes all of the |
||||
+ # above attempts at injection within the url _path_ safe. |
||||
+ InvalidURL = httplib.InvalidURL |
||||
+ with self.assertRaisesRegexp( |
||||
+ InvalidURL, r"contain control.*\\r.*(found at least . .)"): |
||||
+ urllib2.urlopen("http:" + schemeless_url) |
||||
+ with self.assertRaisesRegexp(InvalidURL, r"contain control.*\\n"): |
||||
+ urllib2.urlopen("https:" + schemeless_url) |
||||
+ finally: |
||||
+ self.unfakehttp() |
||||
+ |
||||
+ |
||||
+ |
||||
class RequestTests(unittest.TestCase): |
||||
|
||||
def setUp(self): |
||||
diff --git a/Lib/test/test_xmlrpc.py b/Lib/test/test_xmlrpc.py |
||||
index 79e862a..347b494 100644 |
||||
--- a/Lib/test/test_xmlrpc.py |
||||
+++ b/Lib/test/test_xmlrpc.py |
||||
@@ -592,7 +592,13 @@ class SimpleServerTestCase(BaseServerTestCase): |
||||
def test_partial_post(self): |
||||
# Check that a partial POST doesn't make the server loop: issue #14001. |
||||
conn = httplib.HTTPConnection(ADDR, PORT) |
||||
- conn.request('POST', '/RPC2 HTTP/1.0\r\nContent-Length: 100\r\n\r\nbye') |
||||
+ conn.send('POST /RPC2 HTTP/1.0\r\n' |
||||
+ 'Content-Length: 100\r\n\r\n' |
||||
+ 'bye HTTP/1.1\r\n' |
||||
+ 'Host: %s:%s\r\n' |
||||
+ 'Accept-Encoding: identity\r\n' |
||||
+ 'Content-Length: 0\r\n\r\n' |
||||
+ % (ADDR, PORT)) |
||||
conn.close() |
||||
|
||||
class MultiPathServerTestCase(BaseServerTestCase): |
||||
diff --git a/Lib/urllib.py b/Lib/urllib.py |
||||
index 9b31df1..2201e3e 100644 |
||||
--- a/Lib/urllib.py |
||||
+++ b/Lib/urllib.py |
||||
@@ -1079,8 +1079,7 @@ def splithost(url): |
||||
"""splithost('//host[:port]/path') --> 'host[:port]', '/path'.""" |
||||
global _hostprog |
||||
if _hostprog is None: |
||||
- import re |
||||
- _hostprog = re.compile('^//([^/?]*)(.*)$') |
||||
+ _hostprog = re.compile('//([^/#?]*)(.*)', re.DOTALL) |
||||
|
||||
match = _hostprog.match(url) |
||||
if match: |
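httplib.HTTPConnection.putrequest() now refuses request targets containing control characters or bytes above 0x7f, and splithost() stops treating '#' as part of the host, so a crafted URL can no longer smuggle extra header lines or a second request (CVE-2019-9740 / CVE-2019-9947). Sketch (illustrative; localhost:7777 is a placeholder and nothing is sent, since the check fires before any socket I/O):

    import httplib

    conn = httplib.HTTPConnection('localhost', 7777)
    try:
        # CR/LF in the request target would otherwise split the request line.
        conn.putrequest('GET', '/test HTTP/1.1\r\nX-injected: header')
    except httplib.InvalidURL as exc:
        print exc
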
@@ -0,0 +1,37 @@
|
||||
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py |
||||
index d2da0f8..7813b9f 100644 |
||||
--- a/Lib/test/test_urllib.py |
||||
+++ b/Lib/test/test_urllib.py |
||||
@@ -872,6 +872,17 @@ class URLopener_Tests(unittest.TestCase): |
||||
"spam://c:|windows%/:=&?~#+!$,;'@()*[]|/path/"), |
||||
"//c:|windows%/:=&?~#+!$,;'@()*[]|/path/") |
||||
|
||||
+ def test_local_file_open(self): |
||||
+ # bpo-35907, CVE-2019-9948: urllib must reject local_file:// scheme |
||||
+ class DummyURLopener(urllib.URLopener): |
||||
+ def open_local_file(self, url): |
||||
+ return url |
||||
+ for url in ('local_file://example', 'local-file://example'): |
||||
+ self.assertRaises(IOError, urllib.urlopen, url) |
||||
+ self.assertRaises(IOError, urllib.URLopener().open, url) |
||||
+ self.assertRaises(IOError, urllib.URLopener().retrieve, url) |
||||
+ self.assertRaises(IOError, DummyURLopener().open, url) |
||||
+ self.assertRaises(IOError, DummyURLopener().retrieve, url) |
||||
|
||||
# Just commented them out. |
||||
# Can't really tell why keep failing in windows and sparc. |
||||
diff --git a/Lib/urllib.py b/Lib/urllib.py |
||||
index 2201e3e..71e3637 100644 |
||||
--- a/Lib/urllib.py |
||||
+++ b/Lib/urllib.py |
||||
@@ -198,7 +198,9 @@ class URLopener: |
||||
name = 'open_' + urltype |
||||
self.type = urltype |
||||
name = name.replace('-', '_') |
||||
- if not hasattr(self, name): |
||||
+ |
||||
+ # bpo-35907: disallow the file reading with the type not allowed |
||||
+ if not hasattr(self, name) or name == 'open_local_file': |
||||
if proxy: |
||||
return self.open_unknown_proxy(proxy, fullurl, data) |
||||
else: |
@@ -0,0 +1,93 @@
|
||||
diff --git a/Lib/cookielib.py b/Lib/cookielib.py |
||||
index f9c8d2f..9144e1f 100644 |
||||
--- a/Lib/cookielib.py |
||||
+++ b/Lib/cookielib.py |
||||
@@ -1123,6 +1123,11 @@ class DefaultCookiePolicy(CookiePolicy): |
||||
req_host, erhn = eff_request_host(request) |
||||
domain = cookie.domain |
||||
|
||||
+ if domain and not domain.startswith("."): |
||||
+ dotdomain = "." + domain |
||||
+ else: |
||||
+ dotdomain = domain |
||||
+ |
||||
# strict check of non-domain cookies: Mozilla does this, MSIE5 doesn't |
||||
if (cookie.version == 0 and |
||||
(self.strict_ns_domain & self.DomainStrictNonDomain) and |
||||
@@ -1135,7 +1140,7 @@ class DefaultCookiePolicy(CookiePolicy): |
||||
_debug(" effective request-host name %s does not domain-match " |
||||
"RFC 2965 cookie domain %s", erhn, domain) |
||||
return False |
||||
- if cookie.version == 0 and not ("."+erhn).endswith(domain): |
||||
+ if cookie.version == 0 and not ("."+erhn).endswith(dotdomain): |
||||
_debug(" request-host %s does not match Netscape cookie domain " |
||||
"%s", req_host, domain) |
||||
return False |
||||
@@ -1149,7 +1154,11 @@ class DefaultCookiePolicy(CookiePolicy): |
||||
req_host = "."+req_host |
||||
if not erhn.startswith("."): |
||||
erhn = "."+erhn |
||||
- if not (req_host.endswith(domain) or erhn.endswith(domain)): |
||||
+ if domain and not domain.startswith("."): |
||||
+ dotdomain = "." + domain |
||||
+ else: |
||||
+ dotdomain = domain |
||||
+ if not (req_host.endswith(dotdomain) or erhn.endswith(dotdomain)): |
||||
#_debug(" request domain %s does not match cookie domain %s", |
||||
# req_host, domain) |
||||
return False |
||||
diff --git a/Lib/test/test_cookielib.py b/Lib/test/test_cookielib.py |
||||
index dd0ad32..b4f5ea0 100644 |
||||
--- a/Lib/test/test_cookielib.py |
||||
+++ b/Lib/test/test_cookielib.py |
||||
@@ -353,6 +353,7 @@ class CookieTests(TestCase): |
||||
("http://foo.bar.com/", ".foo.bar.com", True), |
||||
("http://foo.bar.com/", "foo.bar.com", True), |
||||
("http://foo.bar.com/", ".bar.com", True), |
||||
+ ("http://foo.bar.com/", "bar.com", True), |
||||
("http://foo.bar.com/", "com", True), |
||||
("http://foo.com/", "rhubarb.foo.com", False), |
||||
("http://foo.com/", ".foo.com", True), |
||||
@@ -363,6 +364,8 @@ class CookieTests(TestCase): |
||||
("http://foo/", "foo", True), |
||||
("http://foo/", "foo.local", True), |
||||
("http://foo/", ".local", True), |
||||
+ ("http://barfoo.com", ".foo.com", False), |
||||
+ ("http://barfoo.com", "foo.com", False), |
||||
]: |
||||
request = urllib2.Request(url) |
||||
r = pol.domain_return_ok(domain, request) |
||||
@@ -910,6 +913,33 @@ class CookieTests(TestCase): |
||||
c.add_cookie_header(req) |
||||
self.assertTrue(not req.has_header("Cookie")) |
||||
|
||||
+ c.clear() |
||||
+ |
||||
+ pol.set_blocked_domains([]) |
||||
+ req = Request("http://acme.com/") |
||||
+ res = FakeResponse(headers, "http://acme.com/") |
||||
+ cookies = c.make_cookies(res, req) |
||||
+ c.extract_cookies(res, req) |
||||
+ self.assertEqual(len(c), 1) |
||||
+ |
||||
+ req = Request("http://acme.com/") |
||||
+ c.add_cookie_header(req) |
||||
+ self.assertTrue(req.has_header("Cookie")) |
||||
+ |
||||
+ req = Request("http://badacme.com/") |
||||
+ c.add_cookie_header(req) |
||||
+ self.assertFalse(pol.return_ok(cookies[0], req)) |
||||
+ self.assertFalse(req.has_header("Cookie")) |
||||
+ |
||||
+ p = pol.set_blocked_domains(["acme.com"]) |
||||
+ req = Request("http://acme.com/") |
||||
+ c.add_cookie_header(req) |
||||
+ self.assertFalse(req.has_header("Cookie")) |
||||
+ |
||||
+ req = Request("http://badacme.com/") |
||||
+ c.add_cookie_header(req) |
||||
+ self.assertFalse(req.has_header("Cookie")) |
||||
+ |
||||
def test_secure(self): |
||||
from cookielib import CookieJar, DefaultCookiePolicy |
||||
|
@@ -0,0 +1,54 @@
|
||||
diff --git a/Lib/email/_parseaddr.py b/Lib/email/_parseaddr.py |
||||
index 690db2c..dc49d2e 100644 |
||||
--- a/Lib/email/_parseaddr.py |
||||
+++ b/Lib/email/_parseaddr.py |
||||
@@ -336,7 +336,12 @@ class AddrlistClass: |
||||
aslist.append('@') |
||||
self.pos += 1 |
||||
self.gotonext() |
||||
- return EMPTYSTRING.join(aslist) + self.getdomain() |
||||
+ domain = self.getdomain() |
||||
+ if not domain: |
||||
+ # Invalid domain, return an empty address instead of returning a |
||||
+ # local part to denote failed parsing. |
||||
+ return EMPTYSTRING |
||||
+ return EMPTYSTRING.join(aslist) + domain |
||||
|
||||
def getdomain(self): |
||||
"""Get the complete domain name from an address.""" |
||||
@@ -351,6 +356,10 @@ class AddrlistClass: |
||||
elif self.field[self.pos] == '.': |
||||
self.pos += 1 |
||||
sdlist.append('.') |
||||
+ elif self.field[self.pos] == '@': |
||||
+ # bpo-34155: Don't parse domains with two `@` like |
||||
+ # `a@malicious.org@important.com`. |
||||
+ return EMPTYSTRING |
||||
elif self.field[self.pos] in self.atomends: |
||||
break |
||||
else: |
||||
diff --git a/Lib/email/test/test_email.py b/Lib/email/test/test_email.py |
||||
index b32da9d..1739033 100644 |
||||
--- a/Lib/email/test/test_email.py |
||||
+++ b/Lib/email/test/test_email.py |
||||
@@ -2308,6 +2308,20 @@ class TestMiscellaneous(TestEmailBase): |
||||
self.assertEqual(Utils.parseaddr('<>'), ('', '')) |
||||
self.assertEqual(Utils.formataddr(Utils.parseaddr('<>')), '') |
||||
|
||||
+ def test_parseaddr_multiple_domains(self): |
||||
+ self.assertEqual( |
||||
+ Utils.parseaddr('a@b@c'), |
||||
+ ('', '') |
||||
+ ) |
||||
+ self.assertEqual( |
||||
+ Utils.parseaddr('a@b.c@c'), |
||||
+ ('', '') |
||||
+ ) |
||||
+ self.assertEqual( |
||||
+ Utils.parseaddr('a@172.17.0.1@c'), |
||||
+ ('', '') |
||||
+ ) |
||||
+ |
||||
def test_noquote_dump(self): |
||||
self.assertEqual( |
||||
Utils.formataddr(('A Silly Person', 'person@dom.ain')), |
@@ -0,0 +1,144 @@
|
||||
diff --git a/Lib/DocXMLRPCServer.py b/Lib/DocXMLRPCServer.py |
||||
index 4064ec2..90b037d 100644 |
||||
--- a/Lib/DocXMLRPCServer.py |
||||
+++ b/Lib/DocXMLRPCServer.py |
||||
@@ -20,6 +20,16 @@ from SimpleXMLRPCServer import (SimpleXMLRPCServer, |
||||
CGIXMLRPCRequestHandler, |
||||
resolve_dotted_attribute) |
||||
|
||||
+ |
||||
+def _html_escape_quote(s): |
||||
+ s = s.replace("&", "&") # Must be done first! |
||||
+ s = s.replace("<", "<") |
||||
+ s = s.replace(">", ">") |
||||
+ s = s.replace('"', """) |
||||
+ s = s.replace('\'', "'") |
||||
+ return s |
||||
+ |
||||
+ |
||||
class ServerHTMLDoc(pydoc.HTMLDoc): |
||||
"""Class used to generate pydoc HTML document for a server""" |
||||
|
||||
@@ -210,7 +220,8 @@ class XMLRPCDocGenerator: |
||||
methods |
||||
) |
||||
|
||||
- return documenter.page(self.server_title, documentation) |
||||
+ title = _html_escape_quote(self.server_title) |
||||
+ return documenter.page(title, documentation) |
||||
|
||||
class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler): |
||||
"""XML-RPC and documentation request handler class. |
||||
diff --git a/Lib/test/test_docxmlrpc.py b/Lib/test/test_docxmlrpc.py |
||||
index 80d1803..d464ef8 100644 |
||||
--- a/Lib/test/test_docxmlrpc.py |
||||
+++ b/Lib/test/test_docxmlrpc.py |
||||
@@ -1,13 +1,11 @@ |
||||
from DocXMLRPCServer import DocXMLRPCServer |
||||
import httplib |
||||
+import re |
||||
import sys |
||||
from test import test_support |
||||
threading = test_support.import_module('threading') |
||||
-import time |
||||
-import socket |
||||
import unittest |
||||
|
||||
-PORT = None |
||||
|
||||
def make_request_and_skipIf(condition, reason): |
||||
# If we skip the test, we have to make a request because the |
||||
@@ -23,13 +21,10 @@ def make_request_and_skipIf(condition, reason): |
||||
return decorator |
||||
|
||||
|
||||
-def server(evt, numrequests): |
||||
+def make_server(): |
||||
serv = DocXMLRPCServer(("localhost", 0), logRequests=False) |
||||
|
||||
try: |
||||
- global PORT |
||||
- PORT = serv.socket.getsockname()[1] |
||||
- |
||||
# Add some documentation |
||||
serv.set_server_title("DocXMLRPCServer Test Documentation") |
||||
serv.set_server_name("DocXMLRPCServer Test Docs") |
||||
@@ -56,42 +51,31 @@ def server(evt, numrequests): |
||||
|
||||
serv.register_function(add) |
||||
serv.register_function(lambda x, y: x-y) |
||||
- |
||||
- while numrequests > 0: |
||||
- serv.handle_request() |
||||
- numrequests -= 1 |
||||
- except socket.timeout: |
||||
- pass |
||||
- finally: |
||||
+ return serv |
||||
+ except: |
||||
serv.server_close() |
||||
- PORT = None |
||||
- evt.set() |
||||
+ raise |
||||
|
||||
class DocXMLRPCHTTPGETServer(unittest.TestCase): |
||||
def setUp(self): |
||||
- self._threads = test_support.threading_setup() |
||||
# Enable server feedback |
||||
DocXMLRPCServer._send_traceback_header = True |
||||
|
||||
- self.evt = threading.Event() |
||||
- threading.Thread(target=server, args=(self.evt, 1)).start() |
||||
- |
||||
- # wait for port to be assigned |
||||
- n = 1000 |
||||
- while n > 0 and PORT is None: |
||||
- time.sleep(0.001) |
||||
- n -= 1 |
||||
+ self.serv = make_server() |
||||
+ self.thread = threading.Thread(target=self.serv.serve_forever) |
||||
+ self.thread.start() |
||||
|
||||
+ PORT = self.serv.server_address[1] |
||||
self.client = httplib.HTTPConnection("localhost:%d" % PORT) |
||||
|
||||
def tearDown(self): |
||||
self.client.close() |
||||
|
||||
- self.evt.wait() |
||||
- |
||||
# Disable server feedback |
||||
DocXMLRPCServer._send_traceback_header = False |
||||
- test_support.threading_cleanup(*self._threads) |
||||
+ self.serv.shutdown() |
||||
+ self.thread.join() |
||||
+ self.serv.server_close() |
||||
|
||||
def test_valid_get_response(self): |
||||
self.client.request("GET", "/") |
||||
@@ -194,6 +178,25 @@ class DocXMLRPCHTTPGETServer(unittest.TestCase): |
||||
self.assertIn("""Try self.<strong>add</strong>, too.""", |
||||
response.read()) |
||||
|
||||
+ def test_server_title_escape(self): |
||||
+ """Test that the server title and documentation |
||||
+ are escaped for HTML. |
||||
+ """ |
||||
+ self.serv.set_server_title('test_title<script>') |
||||
+ self.serv.set_server_documentation('test_documentation<script>') |
||||
+ self.assertEqual('test_title<script>', self.serv.server_title) |
||||
+ self.assertEqual('test_documentation<script>', |
||||
+ self.serv.server_documentation) |
||||
+ |
||||
+ generated = self.serv.generate_html_documentation() |
||||
+ title = re.search(r'<title>(.+?)</title>', generated).group() |
||||
+ documentation = re.search(r'<p><tt>(.+?)</tt></p>', generated).group() |
||||
+ self.assertEqual('<title>Python: test_title<script></title>', |
||||
+ title) |
||||
+ self.assertEqual('<p><tt>test_documentation<script></tt></p>', |
||||
+ documentation) |
||||
+ |
||||
+ |
||||
def test_main(): |
||||
test_support.run_unittest(DocXMLRPCHTTPGETServer) |
||||
|
@@ -0,0 +1,78 @@
|
||||
From cb33ceb1b0ec5ec1cf8cb8239ea2705508501afc Mon Sep 17 00:00:00 2001 |
||||
From: Rishi <rishi_devan@mail.com> |
||||
Date: Wed, 15 Jul 2020 13:51:00 +0200 |
||||
Subject: [PATCH] 00351-cve-2019-20907-fix-infinite-loop-in-tarfile.patch |
||||
|
||||
00351 # |
||||
Avoid infinite loop when reading specially crafted TAR files using the tarfile module |
||||
(CVE-2019-20907). |
||||
See: https://bugs.python.org/issue39017 |
||||
--- |
||||
Lib/tarfile.py | 2 ++ |
||||
Lib/test/recursion.tar | Bin 0 -> 516 bytes |
||||
Lib/test/test_tarfile.py | 8 ++++++++ |
||||
.../2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst | 1 + |
||||
4 files changed, 11 insertions(+) |
||||
create mode 100644 Lib/test/recursion.tar |
||||
create mode 100644 Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst |
||||
|
||||
diff --git a/Lib/tarfile.py b/Lib/tarfile.py |
||||
index 16a6e86..ddddc1b 100644 |
||||
--- a/Lib/tarfile.py |
||||
+++ b/Lib/tarfile.py |
||||
@@ -1388,6 +1388,8 @@ class TarInfo(object): |
||||
|
||||
length, keyword = match.groups() |
||||
length = int(length) |
||||
+ if length == 0: |
||||
+ raise InvalidHeaderError("invalid header") |
||||
value = buf[match.end(2) + 1:match.start(1) + length - 1] |
||||
|
||||
keyword = keyword.decode("utf8") |
||||
diff --git a/Lib/test/recursion.tar b/Lib/test/recursion.tar |
||||
new file mode 100644 |
||||
index 0000000000000000000000000000000000000000..b8237251964983f54ed1966297e887636cd0c5f4 |
||||
GIT binary patch |
||||
literal 516 |
||||
zcmYdFPRz+kEn=W0Fn}74P8%Xw3X=l~85kIuo0>8xq$A1Gm}!7)KUsFc41m#O8A5+e |
||||
I1_}|j06>QaCIA2c |
||||
|
||||
literal 0 |
||||
HcmV?d00001 |
||||
|
||||
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py |
||||
index 69d342a..9aa6ea6 100644 |
||||
--- a/Lib/test/test_tarfile.py |
||||
+++ b/Lib/test/test_tarfile.py |
||||
@@ -11,6 +11,7 @@ import unittest |
||||
import tarfile |
||||
|
||||
from test import test_support |
||||
+from test import test_support as support |
||||
|
||||
# Check for our compression modules. |
||||
try: |
||||
@@ -206,6 +207,13 @@ class CommonReadTest(ReadTest): |
||||
|
||||
class MiscReadTest(CommonReadTest): |
||||
|
||||
+ def test_length_zero_header(self): |
||||
+ # bpo-39017 (CVE-2019-20907): reading a zero-length header should fail |
||||
+ # with an exception |
||||
+ with self.assertRaisesRegexp(tarfile.ReadError, "file could not be opened successfully"): |
||||
+ with tarfile.open(support.findfile('recursion.tar')) as tar: |
||||
+ pass |
||||
+ |
||||
def test_no_name_argument(self): |
||||
fobj = open(self.tarname, "rb") |
||||
tar = tarfile.open(fileobj=fobj, mode=self.mode) |
||||
diff --git a/Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst b/Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst |
||||
new file mode 100644 |
||||
index 0000000..ad26676 |
||||
--- /dev/null |
||||
+++ b/Misc/NEWS.d/next/Library/2020-07-12-22-16-58.bpo-39017.x3Cg-9.rst |
||||
@@ -0,0 +1 @@ |
||||
+Avoid infinite loop when reading specially crafted TAR files using the tarfile module (CVE-2019-20907). |
||||
-- |
||||
2.26.2 |
||||
|
@@ -0,0 +1,129 @@
|
||||
|
||||
# HG changeset patch |
||||
# User Serhiy Storchaka <storchaka@gmail.com> |
||||
# Date 1369166013 -10800 |
||||
# Node ID 8408eed151ebee1c546414f1f40be46c1ad76077 |
||||
# Parent 7fce9186accb10122e45d975f4b380c2ed0fae35 |
||||
Issue #17979: Fixed the re module in build with --disable-unicode. |
||||
|
||||
diff --git a/Modules/sre.h b/Modules/sre.h |
||||
--- a/Modules/sre.h |
||||
+++ b/Modules/sre.h |
||||
@@ -23,8 +23,8 @@ |
||||
# define SRE_MAXREPEAT ((SRE_CODE)PY_SSIZE_T_MAX + 1u) |
||||
# endif |
||||
#else |
||||
-# define SRE_CODE unsigned long |
||||
-# if SIZEOF_SIZE_T > SIZEOF_LONG |
||||
+# define SRE_CODE unsigned int |
||||
+# if SIZEOF_SIZE_T > SIZEOF_INT |
||||
# define SRE_MAXREPEAT (~(SRE_CODE)0) |
||||
# else |
||||
# define SRE_MAXREPEAT ((SRE_CODE)PY_SSIZE_T_MAX + 1u) |
||||
|
||||
|
||||
# HG changeset patch |
||||
# User Serhiy Storchaka <storchaka@gmail.com> |
||||
# Date 1375547193 -10800 |
||||
# Node ID e5e425fd1e4f7e859abdced43621203cdfa87a16 |
||||
# Parent 8205e72b5cfcdb7a3450c80f3368eff610bc650c |
||||
Issue #17998: Fix an internal error in regular expression engine. |
||||
|
||||
diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py |
||||
--- a/Lib/test/test_re.py |
||||
+++ b/Lib/test/test_re.py |
||||
@@ -907,6 +907,16 @@ class ReTests(unittest.TestCase): |
||||
self.assertEqual(m.group(1), "") |
||||
self.assertEqual(m.group(2), "y") |
||||
|
||||
+ def test_issue17998(self): |
||||
+ for reps in '*', '+', '?', '{1}': |
||||
+ for mod in '', '?': |
||||
+ pattern = '.' + reps + mod + 'yz' |
||||
+ self.assertEqual(re.compile(pattern, re.S).findall('xyz'), |
||||
+ ['xyz'], msg=pattern) |
||||
+ pattern = pattern.encode() |
||||
+ self.assertEqual(re.compile(pattern, re.S).findall(b'xyz'), |
||||
+ [b'xyz'], msg=pattern) |
||||
+ |
||||
|
||||
|
||||
def run_re_tests(): |
||||
diff --git a/Modules/_sre.c b/Modules/_sre.c |
||||
--- a/Modules/_sre.c |
||||
+++ b/Modules/_sre.c |
||||
@@ -1028,7 +1028,7 @@ entrance: |
||||
TRACE(("|%p|%p|REPEAT_ONE %d %d\n", ctx->pattern, ctx->ptr, |
||||
ctx->pattern[1], ctx->pattern[2])); |
||||
|
||||
- if (ctx->pattern[1] > end - ctx->ptr) |
||||
+ if ((Py_ssize_t) ctx->pattern[1] > end - ctx->ptr) |
||||
RETURN_FAILURE; /* cannot match */ |
||||
|
||||
state->ptr = ctx->ptr; |
||||
@@ -1111,7 +1111,7 @@ entrance: |
||||
TRACE(("|%p|%p|MIN_REPEAT_ONE %d %d\n", ctx->pattern, ctx->ptr, |
||||
ctx->pattern[1], ctx->pattern[2])); |
||||
|
||||
- if (ctx->pattern[1] > end - ctx->ptr) |
||||
+ if ((Py_ssize_t) ctx->pattern[1] > end - ctx->ptr) |
||||
RETURN_FAILURE; /* cannot match */ |
||||
|
||||
state->ptr = ctx->ptr; |
||||
@@ -1210,7 +1210,7 @@ entrance: |
||||
TRACE(("|%p|%p|MAX_UNTIL %d\n", ctx->pattern, |
||||
ctx->ptr, ctx->count)); |
||||
|
||||
- if (ctx->count < ctx->u.rep->pattern[1]) { |
||||
+ if (ctx->count < (Py_ssize_t) ctx->u.rep->pattern[1]) { |
||||
/* not enough matches */ |
||||
ctx->u.rep->count = ctx->count; |
||||
DO_JUMP(JUMP_MAX_UNTIL_1, jump_max_until_1, |
||||
@@ -1224,7 +1224,7 @@ entrance: |
||||
RETURN_FAILURE; |
||||
} |
||||
|
||||
- if ((ctx->count < ctx->u.rep->pattern[2] || |
||||
+ if ((ctx->count < (Py_ssize_t) ctx->u.rep->pattern[2] || |
||||
ctx->u.rep->pattern[2] == SRE_MAXREPEAT) && |
||||
state->ptr != ctx->u.rep->last_ptr) { |
||||
/* we may have enough matches, but if we can |
||||
@@ -1273,7 +1273,7 @@ entrance: |
||||
TRACE(("|%p|%p|MIN_UNTIL %d %p\n", ctx->pattern, |
||||
ctx->ptr, ctx->count, ctx->u.rep->pattern)); |
||||
|
||||
- if (ctx->count < ctx->u.rep->pattern[1]) { |
||||
+ if (ctx->count < (Py_ssize_t) ctx->u.rep->pattern[1]) { |
||||
/* not enough matches */ |
||||
ctx->u.rep->count = ctx->count; |
||||
DO_JUMP(JUMP_MIN_UNTIL_1, jump_min_until_1, |
||||
@@ -1302,7 +1302,7 @@ entrance: |
||||
|
||||
LASTMARK_RESTORE(); |
||||
|
||||
- if ((ctx->count >= ctx->u.rep->pattern[2] |
||||
+ if ((ctx->count >= (Py_ssize_t) ctx->u.rep->pattern[2] |
||||
&& ctx->u.rep->pattern[2] != SRE_MAXREPEAT) || |
||||
state->ptr == ctx->u.rep->last_ptr) |
||||
RETURN_FAILURE; |
||||
diff --git a/Modules/sre.h b/Modules/sre.h |
||||
--- a/Modules/sre.h |
||||
+++ b/Modules/sre.h |
||||
@@ -20,14 +20,14 @@ |
||||
# if SIZEOF_SIZE_T > 4 |
||||
# define SRE_MAXREPEAT (~(SRE_CODE)0) |
||||
# else |
||||
-# define SRE_MAXREPEAT ((SRE_CODE)PY_SSIZE_T_MAX + 1u) |
||||
+# define SRE_MAXREPEAT ((SRE_CODE)PY_SSIZE_T_MAX) |
||||
# endif |
||||
#else |
||||
# define SRE_CODE unsigned int |
||||
# if SIZEOF_SIZE_T > SIZEOF_INT |
||||
# define SRE_MAXREPEAT (~(SRE_CODE)0) |
||||
# else |
||||
-# define SRE_MAXREPEAT ((SRE_CODE)PY_SSIZE_T_MAX + 1u) |
||||
+# define SRE_MAXREPEAT ((SRE_CODE)PY_SSIZE_T_MAX) |
||||
# endif |
||||
#endif |
||||
|
||||
|