basebuilder_pel7ppc64lebuilder0 committed 3 years ago
15 changed files with 1556 additions and 1 deletion
@@ -0,0 +1,30 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: David Malcolm <dmalcolm@redhat.com>
Date: Wed, 13 Jan 2010 21:25:18 +0000
Subject: [PATCH] 00001: Fixup distutils/unixccompiler.py to remove standard
 library path from rpath Was Patch0 in ivazquez' python3000 specfile

---
 Lib/distutils/unixccompiler.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py
index 4d7a6de740..353086a648 100644
--- a/Lib/distutils/unixccompiler.py
+++ b/Lib/distutils/unixccompiler.py
@@ -82,6 +82,15 @@ class UnixCCompiler(CCompiler):
 if sys.platform == "cygwin":
 exe_extension = ".exe"

+ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
+ """Remove standard library path from rpath"""
+ libraries, library_dirs, runtime_library_dirs = super()._fix_lib_args(
+ libraries, library_dirs, runtime_library_dirs)
+ libdir = sysconfig.get_config_var('LIBDIR')
+ if runtime_library_dirs and (libdir in runtime_library_dirs):
+ runtime_library_dirs.remove(libdir)
+ return libraries, library_dirs, runtime_library_dirs
+
 def preprocess(self, source, output_file=None, macros=None,
 include_dirs=None, extra_preargs=None, extra_postargs=None):
 fixed_args = self._fix_compile_args(None, macros, include_dirs)
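
For illustration, a minimal standalone sketch of the filtering this override performs (the helper name below is made up; the real patch does this inside UnixCCompiler._fix_lib_args):

    import sysconfig

    def drop_stdlib_rpath(runtime_library_dirs):
        # Drop the interpreter's LIBDIR (e.g. /usr/lib64) from the rpath list
        # so built extensions do not hard-code the standard library path.
        libdir = sysconfig.get_config_var('LIBDIR')
        return [d for d in (runtime_library_dirs or []) if d != libdir]

    # e.g. ['/usr/lib64', '/opt/app/lib'] -> ['/opt/app/lib'] when LIBDIR is /usr/lib64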
@@ -0,0 +1,102 @@
From eaf7ea1fc339e1ff348ed941ed2e8c4d66f3e458 Mon Sep 17 00:00:00 2001
From: Josh Stone <jistone@redhat.com>
Date: Thu, 18 Feb 2021 19:14:58 -0800
Subject: [PATCH] Revert "Auto merge of #79547 - erikdesjardins:byval,
 r=nagisa"

This reverts commit a094ff9590b83c8f94d898f92c2964a5803ded06, reversing
changes made to d37afad0cc87bf709ad10c85319296ac53030f03.
---
 compiler/rustc_middle/src/ty/layout.rs | 12 ++++++------
 ...return-value-in-reg.rs => return-value-in-reg.rs} | 4 ++--
 src/test/codegen/union-abi.rs | 11 +++--------
 3 files changed, 11 insertions(+), 16 deletions(-)
 rename src/test/codegen/{arg-return-value-in-reg.rs => return-value-in-reg.rs} (74%)

diff --git a/compiler/rustc_middle/src/ty/layout.rs b/compiler/rustc_middle/src/ty/layout.rs
index b545b92c9252..545f6aee1a21 100644
--- a/compiler/rustc_middle/src/ty/layout.rs
+++ b/compiler/rustc_middle/src/ty/layout.rs
@@ -2849,7 +2849,7 @@ fn adjust_for_abi(&mut self, cx: &C, abi: SpecAbi) {
 || abi == SpecAbi::RustIntrinsic
 || abi == SpecAbi::PlatformIntrinsic
 {
- let fixup = |arg: &mut ArgAbi<'tcx, Ty<'tcx>>| {
+ let fixup = |arg: &mut ArgAbi<'tcx, Ty<'tcx>>, is_ret: bool| {
 if arg.is_ignore() {
 return;
 }
@@ -2887,9 +2887,9 @@ fn adjust_for_abi(&mut self, cx: &C, abi: SpecAbi) {
 _ => return,
 }

- // Pass and return structures up to 2 pointers in size by value, matching `ScalarPair`.
- // LLVM will usually pass these in 2 registers, which is more efficient than by-ref.
- let max_by_val_size = Pointer.size(cx) * 2;
+ // Return structures up to 2 pointers in size by value, matching `ScalarPair`. LLVM
+ // will usually return these in 2 registers, which is more efficient than by-ref.
+ let max_by_val_size = if is_ret { Pointer.size(cx) * 2 } else { Pointer.size(cx) };
 let size = arg.layout.size;

 if arg.layout.is_unsized() || size > max_by_val_size {
@@ -2901,9 +2901,9 @@ fn adjust_for_abi(&mut self, cx: &C, abi: SpecAbi) {
 arg.cast_to(Reg { kind: RegKind::Integer, size });
 }
 };
- fixup(&mut self.ret);
+ fixup(&mut self.ret, true);
 for arg in &mut self.args {
- fixup(arg);
+ fixup(arg, false);
 }
 return;
 }
diff --git a/src/test/codegen/arg-return-value-in-reg.rs b/src/test/codegen/return-value-in-reg.rs
similarity index 74%
rename from src/test/codegen/arg-return-value-in-reg.rs
rename to src/test/codegen/return-value-in-reg.rs
index a69291d47821..4bc0136c5e32 100644
--- a/src/test/codegen/arg-return-value-in-reg.rs
+++ b/src/test/codegen/return-value-in-reg.rs
@@ -1,4 +1,4 @@
-//! Check that types of up to 128 bits are passed and returned by-value instead of via pointer.
+//! This test checks that types of up to 128 bits are returned by-value instead of via out-pointer.

 // compile-flags: -C no-prepopulate-passes -O
 // only-x86_64
@@ -11,7 +11,7 @@ pub struct S {
 c: u32,
 }

-// CHECK: define i128 @modify(i128{{( %0)?}})
+// CHECK: define i128 @modify(%S* noalias nocapture dereferenceable(16) %s)
 #[no_mangle]
 pub fn modify(s: S) -> S {
 S { a: s.a + s.a, b: s.b + s.b, c: s.c + s.c }
diff --git a/src/test/codegen/union-abi.rs b/src/test/codegen/union-abi.rs
index f282fd237054..afea01e9a2d0 100644
--- a/src/test/codegen/union-abi.rs
+++ b/src/test/codegen/union-abi.rs
@@ -63,16 +63,11 @@ pub union UnionU128{a:u128}
 #[no_mangle]
 pub fn test_UnionU128(_: UnionU128) -> UnionU128 { loop {} }

-pub union UnionU128x2{a:(u128, u128)}
-// CHECK: define void @test_UnionU128x2(i128 %_1.0, i128 %_1.1)
-#[no_mangle]
-pub fn test_UnionU128x2(_: UnionU128x2) { loop {} }
-
 #[repr(C)]
-pub union CUnionU128x2{a:(u128, u128)}
-// CHECK: define void @test_CUnionU128x2(%CUnionU128x2* {{.*}} %_1)
+pub union CUnionU128{a:u128}
+// CHECK: define void @test_CUnionU128(%CUnionU128* {{.*}} %_1)
 #[no_mangle]
-pub fn test_CUnionU128x2(_: CUnionU128x2) { loop {} }
+pub fn test_CUnionU128(_: CUnionU128) { loop {} }

 pub union UnionBool { b:bool }
 // CHECK: define zeroext i1 @test_UnionBool(i8 %b)
--
2.29.2

@@ -0,0 +1,71 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= <miro@hroncok.cz>
Date: Wed, 15 Aug 2018 15:36:29 +0200
Subject: [PATCH] 00189: Instead of bundled wheels, use our RPM packaged wheels

We keep them in /usr/share/python-wheels

Downstream only: upstream bundles
We might eventually pursuit upstream support, but it's low prio
---
 Lib/ensurepip/__init__.py | 32 ++++++++++++++++++++++----------
 1 file changed, 22 insertions(+), 10 deletions(-)

diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py
index 38bb42104b..413c1b300e 100644
--- a/Lib/ensurepip/__init__.py
+++ b/Lib/ensurepip/__init__.py
@@ -1,6 +1,7 @@
+import distutils.version
+import glob
 import os
 import os.path
-import pkgutil
 import sys
 import runpy
 import tempfile
@@ -9,10 +10,24 @@ import subprocess

 __all__ = ["version", "bootstrap"]

+_WHEEL_DIR = "/usr/share/python-wheels/"

-_SETUPTOOLS_VERSION = "49.2.1"
+_wheels = {}

-_PIP_VERSION = "20.2.3"
+def _get_most_recent_wheel_version(pkg):
+ prefix = os.path.join(_WHEEL_DIR, "{}-".format(pkg))
+ _wheels[pkg] = {}
+ for suffix in "-py2.py3-none-any.whl", "-py3-none-any.whl":
+ pattern = "{}*{}".format(prefix, suffix)
+ for path in glob.glob(pattern):
+ version_str = path[len(prefix):-len(suffix)]
+ _wheels[pkg][version_str] = os.path.basename(path)
+ return str(max(_wheels[pkg], key=distutils.version.LooseVersion))
+
+
+_SETUPTOOLS_VERSION = _get_most_recent_wheel_version("setuptools")
+
+_PIP_VERSION = _get_most_recent_wheel_version("pip")

 _PROJECTS = [
 ("setuptools", _SETUPTOOLS_VERSION, "py3"),
@@ -102,13 +117,10 @@ def _bootstrap(*, root=None, upgrade=False, user=False,
 # additional paths that need added to sys.path
 additional_paths = []
 for project, version, py_tag in _PROJECTS:
- wheel_name = "{}-{}-{}-none-any.whl".format(project, version, py_tag)
- whl = pkgutil.get_data(
- "ensurepip",
- "_bundled/{}".format(wheel_name),
- )
- with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
- fp.write(whl)
+ wheel_name = _wheels[project][version]
+ with open(os.path.join(_WHEEL_DIR, wheel_name), "rb") as sfp:
+ with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
+ fp.write(sfp.read())

 additional_paths.append(os.path.join(tmpdir, wheel_name))

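
As a rough, self-contained sketch of the version selection used above (the file names below are made-up examples; the patch itself globs /usr/share/python-wheels and keys on distutils.version.LooseVersion):

    from distutils.version import LooseVersion

    def newest_wheel(names, pkg, suffix="-py3-none-any.whl"):
        # Map version string -> wheel file name, then pick the highest version.
        prefix = pkg + "-"
        versions = {n[len(prefix):-len(suffix)]: n
                    for n in names if n.startswith(prefix) and n.endswith(suffix)}
        return versions[max(versions, key=LooseVersion)]

    print(newest_wheel(["pip-20.2.3-py3-none-any.whl", "pip-21.0-py3-none-any.whl"], "pip"))
    # pip-21.0-py3-none-any.whl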
@@ -0,0 +1,62 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Michal Cyprian <m.cyprian@gmail.com>
Date: Mon, 26 Jun 2017 16:32:56 +0200
Subject: [PATCH] 00251: Change user install location

Set values of prefix and exec_prefix in distutils install command
to /usr/local if executable is /usr/bin/python* and RPM build
is not detected to make pip and distutils install into separate location.

Fedora Change: https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe
Downstream only: Awaiting resources to work on upstream PEP
---
 Lib/distutils/command/install.py | 15 +++++++++++++--
 Lib/site.py | 9 ++++++++-
 2 files changed, 21 insertions(+), 3 deletions(-)

diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
index ae4f915669..0e4fd5b74a 100644
--- a/Lib/distutils/command/install.py
+++ b/Lib/distutils/command/install.py
@@ -418,8 +418,19 @@ class install(Command):
 raise DistutilsOptionError(
 "must not supply exec-prefix without prefix")

- self.prefix = os.path.normpath(sys.prefix)
- self.exec_prefix = os.path.normpath(sys.exec_prefix)
+ # self.prefix is set to sys.prefix + /local/
+ # if neither RPM build nor virtual environment is
+ # detected to make pip and distutils install packages
+ # into the separate location.
+ if (not (hasattr(sys, 'real_prefix') or
+ sys.prefix != sys.base_prefix) and
+ 'RPM_BUILD_ROOT' not in os.environ):
+ addition = "/local"
+ else:
+ addition = ""
+
+ self.prefix = os.path.normpath(sys.prefix) + addition
+ self.exec_prefix = os.path.normpath(sys.exec_prefix) + addition

 else:
 if self.exec_prefix is None:
diff --git a/Lib/site.py b/Lib/site.py
index 2e24e86988..2581269999 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -348,7 +348,14 @@ def getsitepackages(prefixes=None):
 return sitepackages

 def addsitepackages(known_paths, prefixes=None):
- """Add site-packages to sys.path"""
+ """Add site-packages to sys.path
+
+ '/usr/local' is included in PREFIXES if RPM build is not detected
+ to make packages installed into this location visible.
+
+ """
+ if ENABLE_USER_SITE and 'RPM_BUILD_ROOT' not in os.environ:
+ PREFIXES.insert(0, "/usr/local")
 for sitedir in getsitepackages(prefixes):
 if os.path.isdir(sitedir):
 addsitedir(sitedir, known_paths)
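
The decision the install-command hunk encodes can be sketched on its own (a simplified standalone check, not the patched distutils code itself):

    import os
    import sys

    def installs_to_usr_local():
        # True only outside a virtualenv (prefix == base_prefix, no real_prefix)
        # and outside an RPM build (RPM_BUILD_ROOT unset): then pip/distutils
        # get prefix = sys.prefix + '/local', i.e. /usr/local on Fedora/RHEL.
        in_venv = hasattr(sys, 'real_prefix') or sys.prefix != sys.base_prefix
        return not in_venv and 'RPM_BUILD_ROOT' not in os.environ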
@@ -0,0 +1,58 @@
diff -up Python-3.5.0/configure.ac.than Python-3.5.0/configure.ac
--- Python-3.5.0/configure.ac.than 2015-11-13 11:51:32.039560172 -0500
+++ Python-3.5.0/configure.ac 2015-11-13 11:52:11.670168157 -0500
@@ -788,9 +788,9 @@ cat >> conftest.c <<EOF
 alpha-linux-gnu
 # elif defined(__ARM_EABI__) && defined(__ARM_PCS_VFP)
 # if defined(__ARMEL__)
- arm-linux-gnueabihf
+ arm-linux-gnueabi
 # else
- armeb-linux-gnueabihf
+ armeb-linux-gnueabi
 # endif
 # elif defined(__ARM_EABI__) && !defined(__ARM_PCS_VFP)
 # if defined(__ARMEL__)
@@ -810,7 +810,7 @@ cat >> conftest.c <<EOF
 # elif _MIPS_SIM == _ABIN32
 mips64el-linux-gnuabin32
 # elif _MIPS_SIM == _ABI64
- mips64el-linux-gnuabi64
+ mips64el-linux-gnu
 # else
 # error unknown platform triplet
 # endif
@@ -820,7 +820,7 @@ cat >> conftest.c <<EOF
 # elif _MIPS_SIM == _ABIN32
 mips64-linux-gnuabin32
 # elif _MIPS_SIM == _ABI64
- mips64-linux-gnuabi64
+ mips64-linux-gnu
 # else
 # error unknown platform triplet
 # endif
@@ -830,9 +830,9 @@ cat >> conftest.c <<EOF
 powerpc-linux-gnuspe
 # elif defined(__powerpc64__)
 # if defined(__LITTLE_ENDIAN__)
- powerpc64le-linux-gnu
+ ppc64le-linux-gnu
 # else
- powerpc64-linux-gnu
+ ppc64-linux-gnu
 # endif
 # elif defined(__powerpc__)
 powerpc-linux-gnu
diff --git a/config.sub b/config.sub
index 40ea5df..932128b 100755
--- a/config.sub
+++ b/config.sub
@@ -1045,7 +1045,7 @@ case $basic_machine in
 ;;
 ppc64) basic_machine=powerpc64-unknown
 ;;
- ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
+ ppc64-* | ppc64p7-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
 ;;
 ppc64le | powerpc64little)
 basic_machine=powerpc64le-unknown
@@ -0,0 +1,228 @@
|
||||
diff --git a/Lib/ssl.py b/Lib/ssl.py |
||||
index 1f3a31a..b54a684 100644 |
||||
--- a/Lib/ssl.py |
||||
+++ b/Lib/ssl.py |
||||
@@ -116,6 +116,7 @@ except ImportError: |
||||
|
||||
|
||||
from _ssl import HAS_SNI, HAS_ECDH, HAS_NPN, HAS_ALPN, HAS_TLSv1_3 |
||||
+from _ssl import _DEFAULT_CIPHERS |
||||
from _ssl import _OPENSSL_API_VERSION |
||||
|
||||
|
||||
@@ -174,48 +175,7 @@ else: |
||||
CHANNEL_BINDING_TYPES = [] |
||||
|
||||
|
||||
-# Disable weak or insecure ciphers by default |
||||
-# (OpenSSL's default setting is 'DEFAULT:!aNULL:!eNULL') |
||||
-# Enable a better set of ciphers by default |
||||
-# This list has been explicitly chosen to: |
||||
-# * TLS 1.3 ChaCha20 and AES-GCM cipher suites |
||||
-# * Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE) |
||||
-# * Prefer ECDHE over DHE for better performance |
||||
-# * Prefer AEAD over CBC for better performance and security |
||||
-# * Prefer AES-GCM over ChaCha20 because most platforms have AES-NI |
||||
-# (ChaCha20 needs OpenSSL 1.1.0 or patched 1.0.2) |
||||
-# * Prefer any AES-GCM and ChaCha20 over any AES-CBC for better |
||||
-# performance and security |
||||
-# * Then Use HIGH cipher suites as a fallback |
||||
-# * Disable NULL authentication, NULL encryption, 3DES and MD5 MACs |
||||
-# for security reasons |
||||
-_DEFAULT_CIPHERS = ( |
||||
- 'TLS13-AES-256-GCM-SHA384:TLS13-CHACHA20-POLY1305-SHA256:' |
||||
- 'TLS13-AES-128-GCM-SHA256:' |
||||
- 'ECDH+AESGCM:ECDH+CHACHA20:DH+AESGCM:DH+CHACHA20:ECDH+AES256:DH+AES256:' |
||||
- 'ECDH+AES128:DH+AES:ECDH+HIGH:DH+HIGH:RSA+AESGCM:RSA+AES:RSA+HIGH:' |
||||
- '!aNULL:!eNULL:!MD5:!3DES' |
||||
- ) |
||||
- |
||||
-# Restricted and more secure ciphers for the server side |
||||
-# This list has been explicitly chosen to: |
||||
-# * TLS 1.3 ChaCha20 and AES-GCM cipher suites |
||||
-# * Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE) |
||||
-# * Prefer ECDHE over DHE for better performance |
||||
-# * Prefer AEAD over CBC for better performance and security |
||||
-# * Prefer AES-GCM over ChaCha20 because most platforms have AES-NI |
||||
-# * Prefer any AES-GCM and ChaCha20 over any AES-CBC for better |
||||
-# performance and security |
||||
-# * Then Use HIGH cipher suites as a fallback |
||||
-# * Disable NULL authentication, NULL encryption, MD5 MACs, DSS, RC4, and |
||||
-# 3DES for security reasons |
||||
-_RESTRICTED_SERVER_CIPHERS = ( |
||||
- 'TLS13-AES-256-GCM-SHA384:TLS13-CHACHA20-POLY1305-SHA256:' |
||||
- 'TLS13-AES-128-GCM-SHA256:' |
||||
- 'ECDH+AESGCM:ECDH+CHACHA20:DH+AESGCM:DH+CHACHA20:ECDH+AES256:DH+AES256:' |
||||
- 'ECDH+AES128:DH+AES:ECDH+HIGH:DH+HIGH:RSA+AESGCM:RSA+AES:RSA+HIGH:' |
||||
- '!aNULL:!eNULL:!MD5:!DSS:!RC4:!3DES' |
||||
-) |
||||
+_RESTRICTED_SERVER_CIPHERS = _DEFAULT_CIPHERS |
||||
|
||||
|
||||
class CertificateError(ValueError): |
||||
@@ -389,8 +349,6 @@ class SSLContext(_SSLContext): |
||||
|
||||
def __new__(cls, protocol=PROTOCOL_TLS, *args, **kwargs): |
||||
self = _SSLContext.__new__(cls, protocol) |
||||
- if protocol != _SSLv2_IF_EXISTS: |
||||
- self.set_ciphers(_DEFAULT_CIPHERS) |
||||
return self |
||||
|
||||
def __init__(self, protocol=PROTOCOL_TLS): |
||||
@@ -505,8 +463,6 @@ def create_default_context(purpose=Purpose.SERVER_AUTH, *, cafile=None, |
||||
# verify certs and host name in client mode |
||||
context.verify_mode = CERT_REQUIRED |
||||
context.check_hostname = True |
||||
- elif purpose == Purpose.CLIENT_AUTH: |
||||
- context.set_ciphers(_RESTRICTED_SERVER_CIPHERS) |
||||
|
||||
if cafile or capath or cadata: |
||||
context.load_verify_locations(cafile, capath, cadata) |
||||
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py |
||||
index 9785a59..34a7ec2 100644 |
||||
--- a/Lib/test/test_ssl.py |
||||
+++ b/Lib/test/test_ssl.py |
||||
@@ -18,6 +18,7 @@ import asyncore |
||||
import weakref |
||||
import platform |
||||
import functools |
||||
+import sysconfig |
||||
try: |
||||
import ctypes |
||||
except ImportError: |
||||
@@ -36,7 +37,7 @@ PROTOCOLS = sorted(ssl._PROTOCOL_NAMES) |
||||
HOST = support.HOST |
||||
IS_LIBRESSL = ssl.OPENSSL_VERSION.startswith('LibreSSL') |
||||
IS_OPENSSL_1_1 = not IS_LIBRESSL and ssl.OPENSSL_VERSION_INFO >= (1, 1, 0) |
||||
- |
||||
+PY_SSL_DEFAULT_CIPHERS = sysconfig.get_config_var('PY_SSL_DEFAULT_CIPHERS') |
||||
|
||||
def data_file(*name): |
||||
return os.path.join(os.path.dirname(__file__), *name) |
||||
@@ -889,6 +890,19 @@ class ContextTests(unittest.TestCase): |
||||
with self.assertRaisesRegex(ssl.SSLError, "No cipher can be selected"): |
||||
ctx.set_ciphers("^$:,;?*'dorothyx") |
||||
|
||||
+ @unittest.skipUnless(PY_SSL_DEFAULT_CIPHERS == 1, |
||||
+ "Test applies only to Python default ciphers") |
||||
+ def test_python_ciphers(self): |
||||
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) |
||||
+ ciphers = ctx.get_ciphers() |
||||
+ for suite in ciphers: |
||||
+ name = suite['name'] |
||||
+ self.assertNotIn("PSK", name) |
||||
+ self.assertNotIn("SRP", name) |
||||
+ self.assertNotIn("MD5", name) |
||||
+ self.assertNotIn("RC4", name) |
||||
+ self.assertNotIn("3DES", name) |
||||
+ |
||||
@unittest.skipIf(ssl.OPENSSL_VERSION_INFO < (1, 0, 2, 0, 0), 'OpenSSL too old') |
||||
def test_get_ciphers(self): |
||||
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) |
||||
diff --git a/Modules/_ssl.c b/Modules/_ssl.c |
||||
index 5e007da..130f006 100644 |
||||
--- a/Modules/_ssl.c |
||||
+++ b/Modules/_ssl.c |
||||
@@ -237,6 +237,31 @@ SSL_SESSION_get_ticket_lifetime_hint(const SSL_SESSION *s) |
||||
|
||||
#endif /* OpenSSL < 1.1.0 or LibreSSL < 2.7.0 */ |
||||
|
||||
+/* Default cipher suites */ |
||||
+#ifndef PY_SSL_DEFAULT_CIPHERS |
||||
+#define PY_SSL_DEFAULT_CIPHERS 1 |
||||
+#endif |
||||
+ |
||||
+#if PY_SSL_DEFAULT_CIPHERS == 0 |
||||
+ #ifndef PY_SSL_DEFAULT_CIPHER_STRING |
||||
+ #error "Py_SSL_DEFAULT_CIPHERS 0 needs Py_SSL_DEFAULT_CIPHER_STRING" |
||||
+ #endif |
||||
+#elif PY_SSL_DEFAULT_CIPHERS == 1 |
||||
+/* Python custom selection of sensible ciper suites |
||||
+ * DEFAULT: OpenSSL's default cipher list. Since 1.0.2 the list is in sensible order. |
||||
+ * !aNULL:!eNULL: really no NULL ciphers |
||||
+ * !MD5:!3DES:!DES:!RC4:!IDEA:!SEED: no weak or broken algorithms on old OpenSSL versions. |
||||
+ * !aDSS: no authentication with discrete logarithm DSA algorithm |
||||
+ * !SRP:!PSK: no secure remote password or pre-shared key authentication |
||||
+ */ |
||||
+ #define PY_SSL_DEFAULT_CIPHER_STRING "DEFAULT:!aNULL:!eNULL:!MD5:!3DES:!DES:!RC4:!IDEA:!SEED:!aDSS:!SRP:!PSK" |
||||
+#elif PY_SSL_DEFAULT_CIPHERS == 2 |
||||
+/* Ignored in SSLContext constructor, only used to as _ssl.DEFAULT_CIPHER_STRING */ |
||||
+ #define PY_SSL_DEFAULT_CIPHER_STRING SSL_DEFAULT_CIPHER_LIST |
||||
+#else |
||||
+ #error "Unsupported PY_SSL_DEFAULT_CIPHERS" |
||||
+#endif |
||||
+ |
||||
|
||||
enum py_ssl_error { |
||||
/* these mirror ssl.h */ |
||||
@@ -2803,7 +2828,12 @@ _ssl__SSLContext_impl(PyTypeObject *type, int proto_version) |
||||
/* A bare minimum cipher list without completely broken cipher suites. |
||||
* It's far from perfect but gives users a better head start. */ |
||||
if (proto_version != PY_SSL_VERSION_SSL2) { |
||||
- result = SSL_CTX_set_cipher_list(ctx, "HIGH:!aNULL:!eNULL:!MD5"); |
||||
+#if PY_SSL_DEFAULT_CIPHERS == 2 |
||||
+ /* stick to OpenSSL's default settings */ |
||||
+ result = 1; |
||||
+#else |
||||
+ result = SSL_CTX_set_cipher_list(ctx, PY_SSL_DEFAULT_CIPHER_STRING); |
||||
+#endif |
||||
} else { |
||||
/* SSLv2 needs MD5 */ |
||||
result = SSL_CTX_set_cipher_list(ctx, "HIGH:!aNULL:!eNULL"); |
||||
@@ -5343,6 +5373,9 @@ PyInit__ssl(void) |
||||
(PyObject *)&PySSLSession_Type) != 0) |
||||
return NULL; |
||||
|
||||
+ PyModule_AddStringConstant(m, "_DEFAULT_CIPHERS", |
||||
+ PY_SSL_DEFAULT_CIPHER_STRING); |
||||
+ |
||||
PyModule_AddIntConstant(m, "SSL_ERROR_ZERO_RETURN", |
||||
PY_SSL_ERROR_ZERO_RETURN); |
||||
PyModule_AddIntConstant(m, "SSL_ERROR_WANT_READ", |
||||
diff --git a/configure.ac b/configure.ac |
||||
index 3703701..2eff514 100644 |
||||
--- a/configure.ac |
||||
+++ b/configure.ac |
||||
@@ -5598,6 +5598,42 @@ if test "$have_getrandom" = yes; then |
||||
[Define to 1 if the getrandom() function is available]) |
||||
fi |
||||
|
||||
+# ssl module default cipher suite string |
||||
+AH_TEMPLATE(PY_SSL_DEFAULT_CIPHERS, |
||||
+ [Default cipher suites list for ssl module. |
||||
+ 1: Python's preferred selection, 2: leave OpenSSL defaults untouched, 0: custom string]) |
||||
+AH_TEMPLATE(PY_SSL_DEFAULT_CIPHER_STRING, |
||||
+ [Cipher suite string for PY_SSL_DEFAULT_CIPHERS=0] |
||||
+) |
||||
+AC_MSG_CHECKING(for --with-ssl-default-suites) |
||||
+AC_ARG_WITH(ssl-default-suites, |
||||
+ AS_HELP_STRING([--with-ssl-default-suites=@<:@python|openssl|STRING@:>@], |
||||
+ [Override default cipher suites string, |
||||
+ python: use Python's preferred selection (default), |
||||
+ openssl: leave OpenSSL's defaults untouched, |
||||
+ STRING: use a custom string, |
||||
+ PROTOCOL_SSLv2 ignores the setting]), |
||||
+[ |
||||
+AC_MSG_RESULT($withval) |
||||
+case "$withval" in |
||||
+ python) |
||||
+ AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 1) |
||||
+ ;; |
||||
+ openssl) |
||||
+ AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 2) |
||||
+ ;; |
||||
+ *) |
||||
+ AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 0) |
||||
+ AC_DEFINE_UNQUOTED(PY_SSL_DEFAULT_CIPHER_STRING, "$withval") |
||||
+ ;; |
||||
+esac |
||||
+], |
||||
+[ |
||||
+AC_MSG_RESULT(python) |
||||
+AC_DEFINE(PY_SSL_DEFAULT_CIPHERS, 1) |
||||
+]) |
||||
+ |
||||
+ |
||||
# generate output files |
||||
AC_CONFIG_FILES(Makefile.pre Modules/Setup.config Misc/python.pc Misc/python-config.sh) |
||||
AC_CONFIG_FILES([Modules/ld_so_aix], [chmod +x Modules/ld_so_aix]) |
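
The effect of the default "python" cipher setting can be checked at run time, mirroring the test added above (a usage sketch against a patched build):

    import ssl

    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    names = [c['name'] for c in ctx.get_ciphers()]
    # With PY_SSL_DEFAULT_CIPHERS == 1, none of the excluded algorithms appear.
    assert not any(bad in name
                   for name in names
                   for bad in ('PSK', 'SRP', 'MD5', 'RC4', '3DES'))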
@@ -0,0 +1,111 @@
|
||||
From c660debb97f4f422255a82fef2d77804552c043a Mon Sep 17 00:00:00 2001 |
||||
From: Christian Heimes <christian@python.org> |
||||
Date: Tue, 15 Jan 2019 18:16:30 +0100 |
||||
Subject: [PATCH] bpo-35746: Fix segfault in ssl's cert parser |
||||
|
||||
CVE-2019-5010, Fix a NULL pointer deref in ssl module. The cert parser did |
||||
not handle CRL distribution points with empty DP or URI correctly. A |
||||
malicious or buggy certificate can result into segfault. |
||||
|
||||
Signed-off-by: Christian Heimes <christian@python.org> |
||||
--- |
||||
Lib/test/talos-2019-0758.pem | 22 +++++++++++++++++++ |
||||
Lib/test/test_ssl.py | 22 +++++++++++++++++++ |
||||
.../2019-01-15-18-16-05.bpo-35746.nMSd0j.rst | 3 +++ |
||||
Modules/_ssl.c | 4 ++++ |
||||
4 files changed, 51 insertions(+) |
||||
create mode 100644 Lib/test/talos-2019-0758.pem |
||||
create mode 100644 Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst |
||||
|
||||
diff --git a/Lib/test/talos-2019-0758.pem b/Lib/test/talos-2019-0758.pem |
||||
new file mode 100644 |
||||
index 000000000000..13b95a77fd8a |
||||
--- /dev/null |
||||
+++ b/Lib/test/talos-2019-0758.pem |
||||
@@ -0,0 +1,22 @@ |
||||
+-----BEGIN CERTIFICATE----- |
||||
+MIIDqDCCApKgAwIBAgIBAjALBgkqhkiG9w0BAQswHzELMAkGA1UEBhMCVUsxEDAO |
||||
+BgNVBAMTB2NvZHktY2EwHhcNMTgwNjE4MTgwMDU4WhcNMjgwNjE0MTgwMDU4WjA7 |
||||
+MQswCQYDVQQGEwJVSzEsMCoGA1UEAxMjY29kZW5vbWljb24tdm0tMi50ZXN0Lmxh |
||||
+bC5jaXNjby5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC63fGB |
||||
+J80A9Av1GB0bptslKRIUtJm8EeEu34HkDWbL6AJY0P8WfDtlXjlPaLqFa6sqH6ES |
||||
+V48prSm1ZUbDSVL8R6BYVYpOlK8/48xk4pGTgRzv69gf5SGtQLwHy8UPBKgjSZoD |
||||
+5a5k5wJXGswhKFFNqyyxqCvWmMnJWxXTt2XDCiWc4g4YAWi4O4+6SeeHVAV9rV7C |
||||
+1wxqjzKovVe2uZOHjKEzJbbIU6JBPb6TRfMdRdYOw98n1VXDcKVgdX2DuuqjCzHP |
||||
+WhU4Tw050M9NaK3eXp4Mh69VuiKoBGOLSOcS8reqHIU46Reg0hqeL8LIL6OhFHIF |
||||
+j7HR6V1X6F+BfRS/AgMBAAGjgdYwgdMwCQYDVR0TBAIwADAdBgNVHQ4EFgQUOktp |
||||
+HQjxDXXUg8prleY9jeLKeQ4wTwYDVR0jBEgwRoAUx6zgPygZ0ZErF9sPC4+5e2Io |
||||
+UU+hI6QhMB8xCzAJBgNVBAYTAlVLMRAwDgYDVQQDEwdjb2R5LWNhggkA1QEAuwb7 |
||||
+2s0wCQYDVR0SBAIwADAuBgNVHREEJzAlgiNjb2Rlbm9taWNvbi12bS0yLnRlc3Qu |
||||
+bGFsLmNpc2NvLmNvbTAOBgNVHQ8BAf8EBAMCBaAwCwYDVR0fBAQwAjAAMAsGCSqG |
||||
+SIb3DQEBCwOCAQEAvqantx2yBlM11RoFiCfi+AfSblXPdrIrHvccepV4pYc/yO6p |
||||
+t1f2dxHQb8rWH3i6cWag/EgIZx+HJQvo0rgPY1BFJsX1WnYf1/znZpkUBGbVmlJr |
||||
+t/dW1gSkNS6sPsM0Q+7HPgEv8CPDNK5eo7vU2seE0iWOkxSyVUuiCEY9ZVGaLVit |
||||
+p0C78nZ35Pdv4I+1cosmHl28+es1WI22rrnmdBpH8J1eY6WvUw2xuZHLeNVN0TzV |
||||
+Q3qq53AaCWuLOD1AjESWuUCxMZTK9DPS4JKXTK8RLyDeqOvJGjsSWp3kL0y3GaQ+ |
||||
+10T1rfkKJub2+m9A9duin1fn6tHc2wSvB7m3DA== |
||||
+-----END CERTIFICATE----- |
||||
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py |
||||
index 7f6b93148f45..1fc657f4d867 100644 |
||||
--- a/Lib/test/test_ssl.py |
||||
+++ b/Lib/test/test_ssl.py |
||||
@@ -115,6 +115,7 @@ def data_file(*name): |
||||
BADKEY = data_file("badkey.pem") |
||||
NOKIACERT = data_file("nokia.pem") |
||||
NULLBYTECERT = data_file("nullbytecert.pem") |
||||
+TALOS_INVALID_CRLDP = data_file("talos-2019-0758.pem") |
||||
|
||||
DHFILE = data_file("ffdh3072.pem") |
||||
BYTES_DHFILE = os.fsencode(DHFILE) |
||||
@@ -348,6 +349,27 @@ def test_parse_cert(self): |
||||
self.assertEqual(p['crlDistributionPoints'], |
||||
('http://SVRIntl-G3-crl.verisign.com/SVRIntlG3.crl',)) |
||||
|
||||
+ def test_parse_cert_CVE_2019_5010(self): |
||||
+ p = ssl._ssl._test_decode_cert(TALOS_INVALID_CRLDP) |
||||
+ if support.verbose: |
||||
+ sys.stdout.write("\n" + pprint.pformat(p) + "\n") |
||||
+ self.assertEqual( |
||||
+ p, |
||||
+ { |
||||
+ 'issuer': ( |
||||
+ (('countryName', 'UK'),), (('commonName', 'cody-ca'),)), |
||||
+ 'notAfter': 'Jun 14 18:00:58 2028 GMT', |
||||
+ 'notBefore': 'Jun 18 18:00:58 2018 GMT', |
||||
+ 'serialNumber': '02', |
||||
+ 'subject': ((('countryName', 'UK'),), |
||||
+ (('commonName', |
||||
+ 'codenomicon-vm-2.test.lal.cisco.com'),)), |
||||
+ 'subjectAltName': ( |
||||
+ ('DNS', 'codenomicon-vm-2.test.lal.cisco.com'),), |
||||
+ 'version': 3 |
||||
+ } |
||||
+ ) |
||||
+ |
||||
def test_parse_cert_CVE_2013_4238(self): |
||||
p = ssl._ssl._test_decode_cert(NULLBYTECERT) |
||||
if support.verbose: |
||||
diff --git a/Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst b/Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst |
||||
new file mode 100644 |
||||
index 000000000000..dffe347eec84 |
||||
--- /dev/null |
||||
+++ b/Misc/NEWS.d/next/Security/2019-01-15-18-16-05.bpo-35746.nMSd0j.rst |
||||
@@ -0,0 +1,3 @@ |
||||
+[CVE-2019-5010] Fix a NULL pointer deref in ssl module. The cert parser did |
||||
+not handle CRL distribution points with empty DP or URI correctly. A |
||||
+malicious or buggy certificate can result into segfault. |
||||
diff --git a/Modules/_ssl.c b/Modules/_ssl.c |
||||
index 4e3352d9e661..0e720e268d93 100644 |
||||
--- a/Modules/_ssl.c |
||||
+++ b/Modules/_ssl.c |
||||
@@ -1515,6 +1515,10 @@ _get_crl_dp(X509 *certificate) { |
||||
STACK_OF(GENERAL_NAME) *gns; |
||||
|
||||
dp = sk_DIST_POINT_value(dps, i); |
||||
+ if (dp->distpoint == NULL) { |
||||
+ /* Ignore empty DP value, CVE-2019-5010 */ |
||||
+ continue; |
||||
+ } |
||||
gns = dp->distpoint->name.fullname; |
||||
|
||||
for (j=0; j < sk_GENERAL_NAME_num(gns); j++) { |
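
The fix can be exercised from Python with the certificate added by the patch (a usage sketch; it assumes a checkout that ships Lib/test/talos-2019-0758.pem and uses the private _test_decode_cert helper):

    import pprint
    import ssl

    # Before the fix this call crashed the interpreter (NULL deref, CVE-2019-5010);
    # with the fix the empty CRL distribution point is skipped and a dict is returned.
    info = ssl._ssl._test_decode_cert("Lib/test/talos-2019-0758.pem")
    pprint.pprint(info)  # no 'crlDistributionPoints' entry for the empty DP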
@@ -0,0 +1,192 @@
|
||||
From 23fc0416454c4ad5b9b23d520fbe6d89be3efc24 Mon Sep 17 00:00:00 2001 |
||||
From: Steve Dower <steve.dower@microsoft.com> |
||||
Date: Mon, 11 Mar 2019 21:34:03 -0700 |
||||
Subject: [PATCH] [3.6] bpo-36216: Add check for characters in netloc that |
||||
normalize to separators (GH-12201) (GH-12215) |
||||
|
||||
--- |
||||
Doc/library/urllib.parse.rst | 18 +++++++++++++++ |
||||
Lib/test/test_urlparse.py | 23 +++++++++++++++++++ |
||||
Lib/urllib/parse.py | 17 ++++++++++++++ |
||||
.../2019-03-06-09-38-40.bpo-36216.6q1m4a.rst | 3 +++ |
||||
4 files changed, 61 insertions(+) |
||||
create mode 100644 Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst |
||||
|
||||
diff --git a/Doc/library/urllib.parse.rst b/Doc/library/urllib.parse.rst |
||||
index d991254d5ca1..647af613a315 100644 |
||||
--- a/Doc/library/urllib.parse.rst |
||||
+++ b/Doc/library/urllib.parse.rst |
||||
@@ -121,6 +121,11 @@ or on combining URL components into a URL string. |
||||
Unmatched square brackets in the :attr:`netloc` attribute will raise a |
||||
:exc:`ValueError`. |
||||
|
||||
+ Characters in the :attr:`netloc` attribute that decompose under NFKC |
||||
+ normalization (as used by the IDNA encoding) into any of ``/``, ``?``, |
||||
+ ``#``, ``@``, or ``:`` will raise a :exc:`ValueError`. If the URL is |
||||
+ decomposed before parsing, no error will be raised. |
||||
+ |
||||
.. versionchanged:: 3.2 |
||||
Added IPv6 URL parsing capabilities. |
||||
|
||||
@@ -133,6 +138,10 @@ or on combining URL components into a URL string. |
||||
Out-of-range port numbers now raise :exc:`ValueError`, instead of |
||||
returning :const:`None`. |
||||
|
||||
+ .. versionchanged:: 3.6.9 |
||||
+ Characters that affect netloc parsing under NFKC normalization will |
||||
+ now raise :exc:`ValueError`. |
||||
+ |
||||
|
||||
.. function:: parse_qs(qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace', max_num_fields=None) |
||||
|
||||
@@ -256,10 +265,19 @@ or on combining URL components into a URL string. |
||||
Unmatched square brackets in the :attr:`netloc` attribute will raise a |
||||
:exc:`ValueError`. |
||||
|
||||
+ Characters in the :attr:`netloc` attribute that decompose under NFKC |
||||
+ normalization (as used by the IDNA encoding) into any of ``/``, ``?``, |
||||
+ ``#``, ``@``, or ``:`` will raise a :exc:`ValueError`. If the URL is |
||||
+ decomposed before parsing, no error will be raised. |
||||
+ |
||||
.. versionchanged:: 3.6 |
||||
Out-of-range port numbers now raise :exc:`ValueError`, instead of |
||||
returning :const:`None`. |
||||
|
||||
+ .. versionchanged:: 3.6.9 |
||||
+ Characters that affect netloc parsing under NFKC normalization will |
||||
+ now raise :exc:`ValueError`. |
||||
+ |
||||
|
||||
.. function:: urlunsplit(parts) |
||||
|
||||
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py |
||||
index be50b47603aa..e6638aee2244 100644 |
||||
--- a/Lib/test/test_urlparse.py |
||||
+++ b/Lib/test/test_urlparse.py |
||||
@@ -1,3 +1,5 @@ |
||||
+import sys |
||||
+import unicodedata |
||||
import unittest |
||||
import urllib.parse |
||||
|
||||
@@ -984,6 +986,27 @@ def test_all(self): |
||||
expected.append(name) |
||||
self.assertCountEqual(urllib.parse.__all__, expected) |
||||
|
||||
+ def test_urlsplit_normalization(self): |
||||
+ # Certain characters should never occur in the netloc, |
||||
+ # including under normalization. |
||||
+ # Ensure that ALL of them are detected and cause an error |
||||
+ illegal_chars = '/:#?@' |
||||
+ hex_chars = {'{:04X}'.format(ord(c)) for c in illegal_chars} |
||||
+ denorm_chars = [ |
||||
+ c for c in map(chr, range(128, sys.maxunicode)) |
||||
+ if (hex_chars & set(unicodedata.decomposition(c).split())) |
||||
+ and c not in illegal_chars |
||||
+ ] |
||||
+ # Sanity check that we found at least one such character |
||||
+ self.assertIn('\u2100', denorm_chars) |
||||
+ self.assertIn('\uFF03', denorm_chars) |
||||
+ |
||||
+ for scheme in ["http", "https", "ftp"]: |
||||
+ for c in denorm_chars: |
||||
+ url = "{}://netloc{}false.netloc/path".format(scheme, c) |
||||
+ with self.subTest(url=url, char='{:04X}'.format(ord(c))): |
||||
+ with self.assertRaises(ValueError): |
||||
+ urllib.parse.urlsplit(url) |
||||
|
||||
class Utility_Tests(unittest.TestCase): |
||||
"""Testcase to test the various utility functions in the urllib.""" |
||||
diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py |
||||
index 85e68c8b42c7..7b06f4d71d67 100644 |
||||
--- a/Lib/urllib/parse.py |
||||
+++ b/Lib/urllib/parse.py |
||||
@@ -391,6 +391,21 @@ def _splitnetloc(url, start=0): |
||||
delim = min(delim, wdelim) # use earliest delim position |
||||
return url[start:delim], url[delim:] # return (domain, rest) |
||||
|
||||
+def _checknetloc(netloc): |
||||
+ if not netloc or not any(ord(c) > 127 for c in netloc): |
||||
+ return |
||||
+ # looking for characters like \u2100 that expand to 'a/c' |
||||
+ # IDNA uses NFKC equivalence, so normalize for this check |
||||
+ import unicodedata |
||||
+ netloc2 = unicodedata.normalize('NFKC', netloc) |
||||
+ if netloc == netloc2: |
||||
+ return |
||||
+ _, _, netloc = netloc.rpartition('@') # anything to the left of '@' is okay |
||||
+ for c in '/?#@:': |
||||
+ if c in netloc2: |
||||
+ raise ValueError("netloc '" + netloc2 + "' contains invalid " + |
||||
+ "characters under NFKC normalization") |
||||
+ |
||||
def urlsplit(url, scheme='', allow_fragments=True): |
||||
"""Parse a URL into 5 components: |
||||
<scheme>://<netloc>/<path>?<query>#<fragment> |
||||
@@ -420,6 +435,7 @@ def urlsplit(url, scheme='', allow_fragments=True): |
||||
url, fragment = url.split('#', 1) |
||||
if '?' in url: |
||||
url, query = url.split('?', 1) |
||||
+ _checknetloc(netloc) |
||||
v = SplitResult(scheme, netloc, url, query, fragment) |
||||
_parse_cache[key] = v |
||||
return _coerce_result(v) |
||||
@@ -443,6 +459,7 @@ def urlsplit(url, scheme='', allow_fragments=True): |
||||
url, fragment = url.split('#', 1) |
||||
if '?' in url: |
||||
url, query = url.split('?', 1) |
||||
+ _checknetloc(netloc) |
||||
v = SplitResult(scheme, netloc, url, query, fragment) |
||||
_parse_cache[key] = v |
||||
return _coerce_result(v) |
||||
diff --git a/Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst b/Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst |
||||
new file mode 100644 |
||||
index 000000000000..5546394157f9 |
||||
--- /dev/null |
||||
+++ b/Misc/NEWS.d/next/Security/2019-03-06-09-38-40.bpo-36216.6q1m4a.rst |
||||
@@ -0,0 +1,3 @@ |
||||
+Changes urlsplit() to raise ValueError when the URL contains characters that |
||||
+decompose under IDNA encoding (NFKC-normalization) into characters that |
||||
+affect how the URL is parsed. |
||||
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py |
||||
index 0faf2bb..d0365ec 100644 |
||||
--- a/Lib/test/test_urlparse.py |
||||
+++ b/Lib/test/test_urlparse.py |
||||
@@ -1011,6 +1011,12 @@ class UrlParseTestCase(unittest.TestCase): |
||||
self.assertIn('\u2100', denorm_chars) |
||||
self.assertIn('\uFF03', denorm_chars) |
||||
|
||||
+ # bpo-36742: Verify port separators are ignored when they |
||||
+ # existed prior to decomposition |
||||
+ urllib.parse.urlsplit('http://\u30d5\u309a:80') |
||||
+ with self.assertRaises(ValueError): |
||||
+ urllib.parse.urlsplit('http://\u30d5\u309a\ufe1380') |
||||
+ |
||||
for scheme in ["http", "https", "ftp"]: |
||||
for c in denorm_chars: |
||||
url = "{}://netloc{}false.netloc/path".format(scheme, c) |
||||
diff --git a/Lib/urllib/parse.py b/Lib/urllib/parse.py |
||||
index 8b6c9b1..e2f7b69 100644 |
||||
--- a/Lib/urllib/parse.py |
||||
+++ b/Lib/urllib/parse.py |
||||
@@ -402,13 +402,16 @@ def _checknetloc(netloc): |
||||
# looking for characters like \u2100 that expand to 'a/c' |
||||
# IDNA uses NFKC equivalence, so normalize for this check |
||||
import unicodedata |
||||
- netloc2 = unicodedata.normalize('NFKC', netloc) |
||||
- if netloc == netloc2: |
||||
+ n = netloc.rpartition('@')[2] # ignore anything to the left of '@' |
||||
+ n = n.replace(':', '') # ignore characters already included |
||||
+ n = n.replace('#', '') # but not the surrounding text |
||||
+ n = n.replace('?', '') |
||||
+ netloc2 = unicodedata.normalize('NFKC', n) |
||||
+ if n == netloc2: |
||||
return |
||||
- _, _, netloc = netloc.rpartition('@') # anything to the left of '@' is okay |
||||
for c in '/?#@:': |
||||
if c in netloc2: |
||||
- raise ValueError("netloc '" + netloc2 + "' contains invalid " + |
||||
+ raise ValueError("netloc '" + netloc + "' contains invalid " + |
||||
"characters under NFKC normalization") |
||||
|
||||
def urlsplit(url, scheme='', allow_fragments=True): |
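
The observable behaviour of the added _checknetloc() guard, sketched as a usage example against a patched interpreter (the exact error message may differ):

    from urllib.parse import urlsplit

    urlsplit("http://example.com/path")  # unaffected, parses as before
    try:
        # U+2100 (ACCOUNT OF) NFKC-normalizes to 'a/c', which would change
        # where the netloc ends, so the patched urlsplit rejects it.
        urlsplit("http://netloc\u2100false.netloc/path")
    except ValueError as exc:
        print("rejected:", exc)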
@@ -0,0 +1,54 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Miro=20Hron=C4=8Dok?= <miro@hroncok.cz>
Date: Thu, 11 Jul 2019 13:44:13 +0200
Subject: [PATCH] 00328: Restore pyc to TIMESTAMP invalidation mode as default
 in rpmbuild

Since Fedora 31, the $SOURCE_DATE_EPOCH is set in rpmbuild to the latest
%changelog date. This makes Python default to the CHECKED_HASH pyc
invalidation mode, bringing more reproducible builds traded for an import
performance decrease. To avoid that, we don't default to CHECKED_HASH
when $RPM_BUILD_ROOT is set (i.e. when we are building RPM packages).

See https://src.fedoraproject.org/rpms/redhat-rpm-config/pull-request/57#comment-27426
Downstream only: only used when building RPM packages
Ideally, we should talk to upstream and explain why we don't want this
---
 Lib/py_compile.py | 3 ++-
 Lib/test/test_py_compile.py | 2 ++
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/Lib/py_compile.py b/Lib/py_compile.py
index a81f493731..bba3642bf2 100644
--- a/Lib/py_compile.py
+++ b/Lib/py_compile.py
@@ -70,7 +70,8 @@ class PycInvalidationMode(enum.Enum):


 def _get_default_invalidation_mode():
- if os.environ.get('SOURCE_DATE_EPOCH'):
+ if (os.environ.get('SOURCE_DATE_EPOCH') and not
+ os.environ.get('RPM_BUILD_ROOT')):
 return PycInvalidationMode.CHECKED_HASH
 else:
 return PycInvalidationMode.TIMESTAMP
diff --git a/Lib/test/test_py_compile.py b/Lib/test/test_py_compile.py
index e6791c6916..b2d3dcf7fb 100644
--- a/Lib/test/test_py_compile.py
+++ b/Lib/test/test_py_compile.py
@@ -19,6 +19,7 @@ def without_source_date_epoch(fxn):
 def wrapper(*args, **kwargs):
 with support.EnvironmentVarGuard() as env:
 env.unset('SOURCE_DATE_EPOCH')
+ env.unset('RPM_BUILD_ROOT')
 return fxn(*args, **kwargs)
 return wrapper

@@ -29,6 +30,7 @@ def with_source_date_epoch(fxn):
 def wrapper(*args, **kwargs):
 with support.EnvironmentVarGuard() as env:
 env['SOURCE_DATE_EPOCH'] = '123456789'
+ env.unset('RPM_BUILD_ROOT')
 return fxn(*args, **kwargs)
 return wrapper

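
A minimal sketch of the resulting default-mode selection (same condition as the patched _get_default_invalidation_mode(), written against an explicit environment mapping for clarity):

    from py_compile import PycInvalidationMode

    def default_mode(env):
        # CHECKED_HASH only for reproducible builds outside rpmbuild.
        if env.get('SOURCE_DATE_EPOCH') and not env.get('RPM_BUILD_ROOT'):
            return PycInvalidationMode.CHECKED_HASH
        return PycInvalidationMode.TIMESTAMP

    print(default_mode({'SOURCE_DATE_EPOCH': '123456789'}))
    # PycInvalidationMode.CHECKED_HASH
    print(default_mode({'SOURCE_DATE_EPOCH': '123456789', 'RPM_BUILD_ROOT': '/builddir'}))
    # PycInvalidationMode.TIMESTAMP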
@@ -0,0 +1,97 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Lumir Balhar <lbalhar@redhat.com>
Date: Tue, 4 Aug 2020 12:04:03 +0200
Subject: [PATCH] 00353: Original names for architectures with different names
 downstream
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

https://fedoraproject.org/wiki/Changes/Python_Upstream_Architecture_Names

Pythons in RHEL/Fedora used different names for some architectures
than upstream and other distros (for example ppc64 vs. powerpc64).
This was patched in patch 274, now it is sedded if %with legacy_archnames.

That meant that an extension built with the default upstream settings
(on other distro or as an manylinux wheel) could not been found by Python
on RHEL/Fedora because it had a different suffix.
This patch adds the legacy names to importlib so Python is able
to import extensions with a legacy architecture name in its
file name.
It work both ways, so it support both %with and %without legacy_archnames.

WARNING: This patch has no effect on Python built with bootstrap
enabled because Python/importlib_external.h is not regenerated
and therefore Python during bootstrap contains importlib from
upstream without this feature. It's possible to include
Python/importlib_external.h to this patch but it'd make rebasing
a nightmare because it's basically a binary file.

Co-authored-by: Miro Hrončok <miro@hroncok.cz>
---
 Lib/importlib/_bootstrap_external.py | 40 ++++++++++++++++++++++++++--
 1 file changed, 38 insertions(+), 2 deletions(-)

diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py
index b8ac482994..62e937b819 100644
--- a/Lib/importlib/_bootstrap_external.py
+++ b/Lib/importlib/_bootstrap_external.py
@@ -1559,7 +1559,7 @@ def _get_supported_file_loaders():

 Each item is a tuple (loader, suffixes).
 """
- extensions = ExtensionFileLoader, _imp.extension_suffixes()
+ extensions = ExtensionFileLoader, _alternative_architectures(_imp.extension_suffixes())
 source = SourceFileLoader, SOURCE_SUFFIXES
 bytecode = SourcelessFileLoader, BYTECODE_SUFFIXES
 return [extensions, source, bytecode]
@@ -1623,7 +1623,7 @@ def _setup(_bootstrap_module):

 # Constants
 setattr(self_module, '_relax_case', _make_relax_case())
- EXTENSION_SUFFIXES.extend(_imp.extension_suffixes())
+ EXTENSION_SUFFIXES.extend(_alternative_architectures(_imp.extension_suffixes()))
 if builtin_os == 'nt':
 SOURCE_SUFFIXES.append('.pyw')
 if '_d.pyd' in EXTENSION_SUFFIXES:
@@ -1636,3 +1636,39 @@ def _install(_bootstrap_module):
 supported_loaders = _get_supported_file_loaders()
 sys.path_hooks.extend([FileFinder.path_hook(*supported_loaders)])
 sys.meta_path.append(PathFinder)
+
+
+_ARCH_MAP = {
+ "-arm-linux-gnueabi.": "-arm-linux-gnueabihf.",
+ "-armeb-linux-gnueabi.": "-armeb-linux-gnueabihf.",
+ "-mips64-linux-gnu.": "-mips64-linux-gnuabi64.",
+ "-mips64el-linux-gnu.": "-mips64el-linux-gnuabi64.",
+ "-ppc-linux-gnu.": "-powerpc-linux-gnu.",
+ "-ppc-linux-gnuspe.": "-powerpc-linux-gnuspe.",
+ "-ppc64-linux-gnu.": "-powerpc64-linux-gnu.",
+ "-ppc64le-linux-gnu.": "-powerpc64le-linux-gnu.",
+ # The above, but the other way around:
+ "-arm-linux-gnueabihf.": "-arm-linux-gnueabi.",
+ "-armeb-linux-gnueabihf.": "-armeb-linux-gnueabi.",
+ "-mips64-linux-gnuabi64.": "-mips64-linux-gnu.",
+ "-mips64el-linux-gnuabi64.": "-mips64el-linux-gnu.",
+ "-powerpc-linux-gnu.": "-ppc-linux-gnu.",
+ "-powerpc-linux-gnuspe.": "-ppc-linux-gnuspe.",
+ "-powerpc64-linux-gnu.": "-ppc64-linux-gnu.",
+ "-powerpc64le-linux-gnu.": "-ppc64le-linux-gnu.",
+}
+
+
+def _alternative_architectures(suffixes):
+ """Add a suffix with an alternative architecture name
+ to the list of suffixes so an extension built with
+ the default (upstream) setting is loadable with our Pythons
+ """
+
+ for suffix in suffixes:
+ for original, alternative in _ARCH_MAP.items():
+ if original in suffix:
+ suffixes.append(suffix.replace(original, alternative))
+ return suffixes
+
+ return suffixes
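
How the added suffix mapping plays out can be sketched with a standalone copy of the idea (a trimmed _ARCH_MAP and a plain function, not the importlib bootstrap code itself):

    ARCH_MAP = {
        "-powerpc64le-linux-gnu.": "-ppc64le-linux-gnu.",
        "-ppc64le-linux-gnu.": "-powerpc64le-linux-gnu.",
    }

    def with_alternative_archs(suffixes):
        # For each suffix embedding a known architecture spelling, also accept
        # the suffix with the alternative (legacy/upstream) spelling.
        extra = [s.replace(orig, alt)
                 for s in suffixes
                 for orig, alt in ARCH_MAP.items() if orig in s]
        return suffixes + extra

    print(with_alternative_archs(['.cpython-39-powerpc64le-linux-gnu.so', '.abi3.so', '.so']))
    # ['.cpython-39-powerpc64le-linux-gnu.so', '.abi3.so', '.so',
    #  '.cpython-39-ppc64le-linux-gnu.so']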
@@ -0,0 +1,184 @@
|
||||
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 |
||||
From: "Miss Islington (bot)" |
||||
<31488909+miss-islington@users.noreply.github.com> |
||||
Date: Mon, 18 Jan 2021 13:28:52 -0800 |
||||
Subject: [PATCH] 00357: bpo-42938: Replace snprintf with Python unicode |
||||
formatting in ctypes param reprs. (GH-24248) |
||||
|
||||
(cherry picked from commit 916610ef90a0d0761f08747f7b0905541f0977c7) |
||||
|
||||
Co-authored-by: Benjamin Peterson <benjamin@python.org> |
||||
--- |
||||
Lib/ctypes/test/test_parameters.py | 43 ++++++++++++++++ |
||||
.../2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst | 2 + |
||||
Modules/_ctypes/callproc.c | 51 +++++++------------ |
||||
3 files changed, 64 insertions(+), 32 deletions(-) |
||||
create mode 100644 Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst |
||||
|
||||
diff --git a/Lib/ctypes/test/test_parameters.py b/Lib/ctypes/test/test_parameters.py |
||||
index e4c25fd880..531894fdec 100644 |
||||
--- a/Lib/ctypes/test/test_parameters.py |
||||
+++ b/Lib/ctypes/test/test_parameters.py |
||||
@@ -201,6 +201,49 @@ class SimpleTypesTestCase(unittest.TestCase): |
||||
with self.assertRaises(ZeroDivisionError): |
||||
WorseStruct().__setstate__({}, b'foo') |
||||
|
||||
+ def test_parameter_repr(self): |
||||
+ from ctypes import ( |
||||
+ c_bool, |
||||
+ c_char, |
||||
+ c_wchar, |
||||
+ c_byte, |
||||
+ c_ubyte, |
||||
+ c_short, |
||||
+ c_ushort, |
||||
+ c_int, |
||||
+ c_uint, |
||||
+ c_long, |
||||
+ c_ulong, |
||||
+ c_longlong, |
||||
+ c_ulonglong, |
||||
+ c_float, |
||||
+ c_double, |
||||
+ c_longdouble, |
||||
+ c_char_p, |
||||
+ c_wchar_p, |
||||
+ c_void_p, |
||||
+ ) |
||||
+ self.assertRegex(repr(c_bool.from_param(True)), r"^<cparam '\?' at 0x[A-Fa-f0-9]+>$") |
||||
+ self.assertEqual(repr(c_char.from_param(97)), "<cparam 'c' ('a')>") |
||||
+ self.assertRegex(repr(c_wchar.from_param('a')), r"^<cparam 'u' at 0x[A-Fa-f0-9]+>$") |
||||
+ self.assertEqual(repr(c_byte.from_param(98)), "<cparam 'b' (98)>") |
||||
+ self.assertEqual(repr(c_ubyte.from_param(98)), "<cparam 'B' (98)>") |
||||
+ self.assertEqual(repr(c_short.from_param(511)), "<cparam 'h' (511)>") |
||||
+ self.assertEqual(repr(c_ushort.from_param(511)), "<cparam 'H' (511)>") |
||||
+ self.assertRegex(repr(c_int.from_param(20000)), r"^<cparam '[li]' \(20000\)>$") |
||||
+ self.assertRegex(repr(c_uint.from_param(20000)), r"^<cparam '[LI]' \(20000\)>$") |
||||
+ self.assertRegex(repr(c_long.from_param(20000)), r"^<cparam '[li]' \(20000\)>$") |
||||
+ self.assertRegex(repr(c_ulong.from_param(20000)), r"^<cparam '[LI]' \(20000\)>$") |
||||
+ self.assertRegex(repr(c_longlong.from_param(20000)), r"^<cparam '[liq]' \(20000\)>$") |
||||
+ self.assertRegex(repr(c_ulonglong.from_param(20000)), r"^<cparam '[LIQ]' \(20000\)>$") |
||||
+ self.assertEqual(repr(c_float.from_param(1.5)), "<cparam 'f' (1.5)>") |
||||
+ self.assertEqual(repr(c_double.from_param(1.5)), "<cparam 'd' (1.5)>") |
||||
+ self.assertEqual(repr(c_double.from_param(1e300)), "<cparam 'd' (1e+300)>") |
||||
+ self.assertRegex(repr(c_longdouble.from_param(1.5)), r"^<cparam ('d' \(1.5\)|'g' at 0x[A-Fa-f0-9]+)>$") |
||||
+ self.assertRegex(repr(c_char_p.from_param(b'hihi')), "^<cparam 'z' \(0x[A-Fa-f0-9]+\)>$") |
||||
+ self.assertRegex(repr(c_wchar_p.from_param('hihi')), "^<cparam 'Z' \(0x[A-Fa-f0-9]+\)>$") |
||||
+ self.assertRegex(repr(c_void_p.from_param(0x12)), r"^<cparam 'P' \(0x0*12\)>$") |
||||
+ |
||||
################################################################ |
||||
|
||||
if __name__ == '__main__': |
||||
diff --git a/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst b/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst |
||||
new file mode 100644 |
||||
index 0000000000..7df65a156f |
||||
--- /dev/null |
||||
+++ b/Misc/NEWS.d/next/Security/2021-01-18-09-27-31.bpo-42938.4Zn4Mp.rst |
||||
@@ -0,0 +1,2 @@ |
||||
+Avoid static buffers when computing the repr of :class:`ctypes.c_double` and |
||||
+:class:`ctypes.c_longdouble` values. |
||||
diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c |
||||
index a9b8675cd9..de75918d49 100644 |
||||
--- a/Modules/_ctypes/callproc.c |
||||
+++ b/Modules/_ctypes/callproc.c |
||||
@@ -484,58 +484,47 @@ is_literal_char(unsigned char c) |
||||
static PyObject * |
||||
PyCArg_repr(PyCArgObject *self) |
||||
{ |
||||
- char buffer[256]; |
||||
switch(self->tag) { |
||||
case 'b': |
||||
case 'B': |
||||
- sprintf(buffer, "<cparam '%c' (%d)>", |
||||
+ return PyUnicode_FromFormat("<cparam '%c' (%d)>", |
||||
self->tag, self->value.b); |
||||
- break; |
||||
case 'h': |
||||
case 'H': |
||||
- sprintf(buffer, "<cparam '%c' (%d)>", |
||||
+ return PyUnicode_FromFormat("<cparam '%c' (%d)>", |
||||
self->tag, self->value.h); |
||||
- break; |
||||
case 'i': |
||||
case 'I': |
||||
- sprintf(buffer, "<cparam '%c' (%d)>", |
||||
+ return PyUnicode_FromFormat("<cparam '%c' (%d)>", |
||||
self->tag, self->value.i); |
||||
- break; |
||||
case 'l': |
||||
case 'L': |
||||
- sprintf(buffer, "<cparam '%c' (%ld)>", |
||||
+ return PyUnicode_FromFormat("<cparam '%c' (%ld)>", |
||||
self->tag, self->value.l); |
||||
- break; |
||||
|
||||
case 'q': |
||||
case 'Q': |
||||
- sprintf(buffer, |
||||
-#ifdef MS_WIN32 |
||||
- "<cparam '%c' (%I64d)>", |
||||
-#else |
||||
- "<cparam '%c' (%lld)>", |
||||
-#endif |
||||
+ return PyUnicode_FromFormat("<cparam '%c' (%lld)>", |
||||
self->tag, self->value.q); |
||||
- break; |
||||
case 'd': |
||||
- sprintf(buffer, "<cparam '%c' (%f)>", |
||||
- self->tag, self->value.d); |
||||
- break; |
||||
- case 'f': |
||||
- sprintf(buffer, "<cparam '%c' (%f)>", |
||||
- self->tag, self->value.f); |
||||
- break; |
||||
- |
||||
+ case 'f': { |
||||
+ PyObject *f = PyFloat_FromDouble((self->tag == 'f') ? self->value.f : self->value.d); |
||||
+ if (f == NULL) { |
||||
+ return NULL; |
||||
+ } |
||||
+ PyObject *result = PyUnicode_FromFormat("<cparam '%c' (%R)>", self->tag, f); |
||||
+ Py_DECREF(f); |
||||
+ return result; |
||||
+ } |
||||
case 'c': |
||||
if (is_literal_char((unsigned char)self->value.c)) { |
||||
- sprintf(buffer, "<cparam '%c' ('%c')>", |
||||
+ return PyUnicode_FromFormat("<cparam '%c' ('%c')>", |
||||
self->tag, self->value.c); |
||||
} |
||||
else { |
||||
- sprintf(buffer, "<cparam '%c' ('\\x%02x')>", |
||||
+ return PyUnicode_FromFormat("<cparam '%c' ('\\x%02x')>", |
||||
self->tag, (unsigned char)self->value.c); |
||||
} |
||||
- break; |
||||
|
||||
/* Hm, are these 'z' and 'Z' codes useful at all? |
||||
Shouldn't they be replaced by the functionality of c_string |
||||
@@ -544,22 +533,20 @@ PyCArg_repr(PyCArgObject *self) |
||||
case 'z': |
||||
case 'Z': |
||||
case 'P': |
||||
- sprintf(buffer, "<cparam '%c' (%p)>", |
||||
+ return PyUnicode_FromFormat("<cparam '%c' (%p)>", |
||||
self->tag, self->value.p); |
||||
break; |
||||
|
||||
default: |
||||
if (is_literal_char((unsigned char)self->tag)) { |
||||
- sprintf(buffer, "<cparam '%c' at %p>", |
||||
+ return PyUnicode_FromFormat("<cparam '%c' at %p>", |
||||
(unsigned char)self->tag, (void *)self); |
||||
} |
||||
else { |
||||
- sprintf(buffer, "<cparam 0x%02x at %p>", |
||||
+ return PyUnicode_FromFormat("<cparam 0x%02x at %p>", |
||||
(unsigned char)self->tag, (void *)self); |
||||
} |
||||
- break; |
||||
} |
||||
- return PyUnicode_FromString(buffer); |
||||
} |
||||
|
||||
static PyMemberDef PyCArgType_members[] = { |
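
A quick usage check of the new reprs from Python (behaviour of a patched build; pointer-style formatting is platform dependent):

    from ctypes import c_double, c_char, c_void_p

    print(repr(c_double.from_param(1e300)))   # <cparam 'd' (1e+300)>
    print(repr(c_char.from_param(97)))        # <cparam 'c' ('a')>
    print(repr(c_void_p.from_param(0x12)))    # e.g. <cparam 'P' (0x0000000000000012)>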
@@ -0,0 +1,272 @@
|
||||
--- |
||||
kpartx/lopart.c | 206 ++++++++++++++++++++++++++++++++++---------------------- |
||||
1 file changed, 127 insertions(+), 79 deletions(-) |
||||
|
||||
Index: multipath-tools-130222/kpartx/lopart.c |
||||
=================================================================== |
||||
--- multipath-tools-130222.orig/kpartx/lopart.c |
||||
+++ multipath-tools-130222/kpartx/lopart.c |
||||
@@ -25,8 +25,9 @@ |
||||
#include <sys/ioctl.h> |
||||
#include <sys/stat.h> |
||||
#include <sys/mman.h> |
||||
+#include <sys/types.h> |
||||
+#include <dirent.h> |
||||
#include <sysmacros.h> |
||||
-#include <asm/posix_types.h> |
||||
#include <linux/loop.h> |
||||
|
||||
#include "lopart.h" |
||||
@@ -96,97 +97,149 @@ is_loop_device (const char *device) |
||||
#define SIZE(a) (sizeof(a)/sizeof(a[0])) |
||||
|
||||
extern char * |
||||
-find_loop_by_file (const char * filename) |
||||
+find_loop_by_file(const char *filename) |
||||
{ |
||||
- char dev[64]; |
||||
- char *loop_formats[] = { "/dev/loop%d", "/dev/loop/%d" }; |
||||
- int i, j, fd; |
||||
+ DIR *dir; |
||||
+ struct dirent *dent; |
||||
+ char dev[64], *found = NULL, *p; |
||||
+ int fd, bytes_read; |
||||
struct stat statbuf; |
||||
struct loop_info loopinfo; |
||||
+ const char VIRT_BLOCK[] = "/sys/devices/virtual/block"; |
||||
+ char path[PATH_MAX]; |
||||
+ char bf_path[PATH_MAX]; |
||||
+ char backing_file[PATH_MAX]; |
||||
|
||||
- for (j = 0; j < SIZE(loop_formats); j++) { |
||||
+ dir = opendir(VIRT_BLOCK); |
||||
+ if (!dir) |
||||
+ return NULL; |
||||
|
||||
- for (i = 0; i < 256; i++) { |
||||
- sprintf (dev, loop_formats[j], i); |
||||
+ while ((dent = readdir(dir)) != NULL) { |
||||
+ if (strncmp(dent->d_name,"loop",4)) |
||||
+ continue; |
||||
|
||||
- if (stat (dev, &statbuf) != 0 || |
||||
- !S_ISBLK(statbuf.st_mode)) |
||||
- continue; |
||||
+ if (snprintf(path, PATH_MAX, "%s/%s/dev", VIRT_BLOCK, |
||||
+ dent->d_name) >= PATH_MAX) |
||||
+ continue; |
||||
|
||||
- fd = open (dev, O_RDONLY); |
||||
+ fd = open(path, O_RDONLY); |
||||
+ if (fd < 0) |
||||
+ continue; |
||||
|
||||
- if (fd < 0) |
||||
- break; |
||||
+ bytes_read = read(fd, dev, sizeof(dev) - 1); |
||||
+ if (bytes_read <= 0) { |
||||
+ close(fd); |
||||
+ continue; |
||||
+ } |
||||
|
||||
- if (ioctl (fd, LOOP_GET_STATUS, &loopinfo) != 0) { |
||||
- close (fd); |
||||
- continue; |
||||
- } |
||||
+ close(fd); |
||||
|
||||
- if (0 == strcmp(filename, loopinfo.lo_name)) { |
||||
- close (fd); |
||||
- return xstrdup(dev); /*found */ |
||||
- } |
||||
+ dev[bytes_read] = '\0'; |
||||
+ p = strchr(dev, '\n'); |
||||
+ if (p != NULL) |
||||
+ *p = '\0'; |
||||
+ if (snprintf(path, PATH_MAX, "/dev/block/%s", dev) >= PATH_MAX) |
||||
+ continue; |
||||
|
||||
+ fd = open (path, O_RDONLY); |
||||
+ if (fd < 0) |
||||
+ continue; |
||||
+ |
||||
+ if (fstat (fd, &statbuf) != 0 || |
||||
+ !S_ISBLK(statbuf.st_mode)) { |
||||
+ close (fd); |
||||
+ continue; |
||||
+ } |
||||
+ |
||||
+ if (ioctl (fd, LOOP_GET_STATUS, &loopinfo) != 0) { |
||||
close (fd); |
||||
continue; |
||||
} |
||||
+ |
||||
+ close (fd); |
||||
+ |
||||
+ if (0 == strcmp(filename, loopinfo.lo_name)) { |
||||
+ found = realpath(path, NULL); |
||||
+ break; |
||||
+ } |
||||
+ |
||||
+ /* |
||||
+ * filename is a realpath, while loopinfo.lo_name may hold just the |
||||
+ * basename. If that's the case, try to match filename against the |
||||
+ * backing_file entry for this loop entry |
||||
+ */ |
||||
+ if (snprintf(bf_path, PATH_MAX, "%s/%s/loop/backing_file", |
||||
+ VIRT_BLOCK, dent->d_name) >= PATH_MAX) |
||||
+ continue; |
||||
+ |
||||
+ fd = open(bf_path, O_RDONLY); |
||||
+ if (fd < 0) |
||||
+ continue; |
||||
+ |
||||
+ bytes_read = read(fd, backing_file, sizeof(backing_file) - 1); |
||||
+ if (bytes_read <= 0) { |
||||
+ close(fd); |
||||
+ continue; |
||||
+ } |
||||
+ |
||||
+ close(fd); |
||||
+ |
||||
+ backing_file[bytes_read-1] = '\0'; |
||||
+ |
||||
+ if (0 == strcmp(filename, backing_file)) { |
||||
+ found = realpath(path, NULL); |
||||
+ break; |
||||
+ } |
||||
} |
||||
- return NULL; |
||||
+ closedir(dir); |
||||
+ return found; |
||||
} |

extern char *
-find_unused_loop_device (void)
+find_unused_loop_device(void)
{
- /* Just creating a device, say in /tmp, is probably a bad idea -
- people might have problems with backup or so.
- So, we just try /dev/loop[0-7]. */
-
- char dev[20];
- char *loop_formats[] = { "/dev/loop%d", "/dev/loop/%d" };
- int i, j, fd, first = 0, somedev = 0, someloop = 0, loop_known = 0;
+ char dev[20], *next_loop_dev = NULL;
+ int fd, next_loop = 0, somedev = 0, someloop = 0, loop_known = 0;
struct stat statbuf;
struct loop_info loopinfo;
FILE *procdev;

- if (stat("/dev/loop-control", &statbuf) == 0 &&
- S_ISCHR(statbuf.st_mode)) {
- fd = open("/dev/loop-control", O_RDWR);
- if (fd >= 0) {
- first = ioctl(fd, LOOP_CTL_GET_FREE);
- close(fd);
+ while (next_loop_dev == NULL) {
+ if (stat("/dev/loop-control", &statbuf) == 0 &&
+ S_ISCHR(statbuf.st_mode)) {
+ int next_loop_fd;
+
+ next_loop_fd = open("/dev/loop-control", O_RDWR);
+ if (next_loop_fd < 0)
+ return NULL;
+ next_loop = ioctl(next_loop_fd, LOOP_CTL_GET_FREE);
+ close(next_loop_fd);
+ if (next_loop < 0)
+ return NULL;
}
- if (first < 0)
- first = 0;
- }
- for (j = 0; j < SIZE(loop_formats); j++) {
-
- for(i = first; i < 256; i++) {
- sprintf(dev, loop_formats[j], i);
-
- if (stat (dev, &statbuf) == 0 && S_ISBLK(statbuf.st_mode)) {
- somedev++;
- fd = open (dev, O_RDONLY);

- if (fd >= 0) {
+ sprintf(dev, "/dev/loop%d", next_loop);

+ fd = open (dev, O_RDONLY);
+ if (fd >= 0) {
+ if (fstat (fd, &statbuf) == 0 &&
+ S_ISBLK(statbuf.st_mode)) {
+ somedev++;
if(ioctl (fd, LOOP_GET_STATUS, &loopinfo) == 0)
- someloop++; /* in use */
-
- else if (errno == ENXIO) {
- close (fd);
- return xstrdup(dev);/* probably free */
- }
+ someloop++; /* in use */
+ else if (errno == ENXIO)
+ next_loop_dev = xstrdup(dev);

- close (fd);
}
-
+ close (fd);
+
/* continue trying as long as devices exist */
continue;
}
break;
- }
}
+ if (next_loop_dev)
+ return next_loop_dev;

/* Nothing found. Why not? */
if ((procdev = fopen(PROC_DEVICES, "r")) != NULL) {
@@ -209,29 +262,24 @@ find_unused_loop_device (void)
fprintf(stderr, "mount: could not find any device /dev/loop#");

else if (!someloop) {
-
- if (loop_known == 1)
- fprintf(stderr,
- "mount: Could not find any loop device.\n"
- " Maybe /dev/loop# has a wrong major number?");
-
- else if (loop_known == -1)
- fprintf(stderr,
- "mount: Could not find any loop device, and, according to %s,\n"
- " this kernel does not know about the loop device.\n"
- " (If so, then recompile or `modprobe loop'.)",
- PROC_DEVICES);
-
- else
- fprintf(stderr,
- "mount: Could not find any loop device. Maybe this kernel does not know\n"
- " about the loop device (then recompile or `modprobe loop'), or\n"
- " maybe /dev/loop# has the wrong major number?");
-
+ if (loop_known == 1)
+ fprintf(stderr,
+ "mount: Could not find any loop device.\n"
+ " Maybe /dev/loop# has a wrong major number?");
+ else if (loop_known == -1)
+ fprintf(stderr,
+ "mount: Could not find any loop device, and, according to %s,\n"
+ " this kernel does not know about the loop device.\n"
+ " (If so, then recompile or `modprobe loop'.)",
+ PROC_DEVICES);
+ else
+ fprintf(stderr,
+ "mount: Could not find any loop device. Maybe this kernel does not know\n"
+ " about the loop device (then recompile or `modprobe loop'), or\n"
+ " maybe /dev/loop# has the wrong major number?");
} else
fprintf(stderr, "mount: could not find any free loop device");
-
- return 0;
+ return NULL;
}

extern int
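
For readers skimming the hunk above: the rewritten find_loop_by_file() stops probing /dev/loop0 through /dev/loop255 with LOOP_GET_STATUS and instead enumerates /sys/devices/virtual/block, matching the requested file against each loop device's loop/backing_file attribute (the fallback the in-code comment describes). The standalone sketch below shows only that sysfs scan; the function name find_loop_by_backing_file, the /dev/<name> shortcut, and the trimmed error handling are illustrative simplifications, not part of the patch, which also keeps the LOOP_GET_STATUS comparison via /dev/block/<major:minor>.

/*
 * Minimal sketch of the sysfs scan used by the patched find_loop_by_file():
 * walk /sys/devices/virtual/block, pick the loop* entries, and compare the
 * requested file against each device's loop/backing_file attribute.
 * The helper name and the /dev/<name> result are illustrative only.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <dirent.h>
#include <fcntl.h>
#include <unistd.h>
#include <limits.h>

static char *find_loop_by_backing_file(const char *filename)
{
    const char VIRT_BLOCK[] = "/sys/devices/virtual/block";
    char bf_path[PATH_MAX], backing_file[PATH_MAX], dev[PATH_MAX];
    struct dirent *dent;
    char *found = NULL;
    DIR *dir;

    dir = opendir(VIRT_BLOCK);
    if (!dir)
        return NULL;

    while ((dent = readdir(dir)) != NULL) {
        ssize_t n;
        int fd;

        if (strncmp(dent->d_name, "loop", 4))
            continue;

        /* /sys/devices/virtual/block/loopN/loop/backing_file */
        if (snprintf(bf_path, sizeof(bf_path), "%s/%s/loop/backing_file",
                     VIRT_BLOCK, dent->d_name) >= (int)sizeof(bf_path))
            continue;

        fd = open(bf_path, O_RDONLY);
        if (fd < 0)
            continue;        /* loop device exists but is not bound */

        n = read(fd, backing_file, sizeof(backing_file) - 1);
        close(fd);
        if (n <= 0)
            continue;

        backing_file[n - 1] = '\0';    /* strip the trailing newline */

        if (strcmp(filename, backing_file) == 0) {
            snprintf(dev, sizeof(dev), "/dev/%s", dent->d_name);
            found = strdup(dev);
            break;
        }
    }
    closedir(dir);
    return found;
}

int main(int argc, char **argv)
{
    char *dev;

    if (argc != 2) {
        fprintf(stderr, "usage: %s <backing-file>\n", argv[0]);
        return 2;
    }
    dev = find_loop_by_backing_file(argv[1]);
    if (!dev) {
        fprintf(stderr, "no loop device backed by %s\n", argv[1]);
        return 1;
    }
    printf("%s\n", dev);
    free(dev);
    return 0;
}
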
@ -0,0 +1,51 @@
@@ -0,0 +1,51 @@

---
kpartx/devmapper.c | 3 ++-
libmultipath/devmapper.c | 7 ++++++-
2 files changed, 8 insertions(+), 2 deletions(-)

Index: multipath-tools-130222/kpartx/devmapper.c
===================================================================
--- multipath-tools-130222.orig/kpartx/devmapper.c
+++ multipath-tools-130222/kpartx/devmapper.c
@@ -330,7 +330,8 @@ dm_get_map(int major, int minor, char *
next = dm_get_next_target(dmt, next, &start, &length,
&target_type, &params);

- if (snprintf(outparams, PARAMS_SIZE, "%s", params) <= PARAMS_SIZE)
+ if (params &&
+ snprintf(outparams, PARAMS_SIZE, "%s", params) <= PARAMS_SIZE)
r = 0;
out:
dm_task_destroy(dmt);
Index: multipath-tools-130222/libmultipath/devmapper.c
===================================================================
--- multipath-tools-130222.orig/libmultipath/devmapper.c
+++ multipath-tools-130222/libmultipath/devmapper.c
@@ -461,6 +461,8 @@ dm_get_map(const char * name, unsigned l
/* Fetch 1st target */
next = dm_get_next_target(dmt, next, &start, &length,
&target_type, &params);
+ if (!params)
+ goto out;

if (size)
*size = length;
@@ -564,7 +566,8 @@ dm_get_status(char * name, char * outsta
next = dm_get_next_target(dmt, next, &start, &length,
&target_type, &status);

- if (snprintf(outstatus, PARAMS_SIZE, "%s", status) <= PARAMS_SIZE)
+ if (status &&
+ snprintf(outstatus, PARAMS_SIZE, "%s", status) <= PARAMS_SIZE)
r = 0;
out:
if (r)
@@ -1525,6 +1528,8 @@ int dm_reassign_table(const char *name,
do {
next = dm_get_next_target(dmt, next, &start, &length,
&target, &params);
+ if (!params || !target)
+ continue;
memset(buff, 0, PARAMS_SIZE);
strcpy(buff, params);
if (strcmp(target, TGT_MPATH) && strstr(params, old)) {
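
The hunks above all add the same guard: dm_get_next_target() can hand back a NULL params (or target) pointer, and the unpatched code passed that pointer straight into snprintf() or strcpy(). Below is a minimal sketch of the guarded fetch, assuming libdevmapper is available; dm_get_first_params(), the PARAMS_SIZE value, and the stricter "< PARAMS_SIZE" check are illustrative, not multipath-tools code.

/*
 * Sketch of the check-before-copy pattern the patch applies around
 * dm_get_next_target().  Build with: cc sketch.c -ldevmapper
 */
#include <stdio.h>
#include <string.h>
#include <stdint.h>
#include <libdevmapper.h>

#define PARAMS_SIZE 4096    /* illustrative buffer size */

static int dm_get_first_params(const char *name, char *outparams)
{
    struct dm_task *dmt;
    uint64_t start, length;
    char *target_type = NULL, *params = NULL;
    int r = 1;

    dmt = dm_task_create(DM_DEVICE_TABLE);
    if (!dmt)
        return 1;
    if (!dm_task_set_name(dmt, name))
        goto out;
    if (!dm_task_run(dmt))
        goto out;

    /* Fetch the first target of the table */
    dm_get_next_target(dmt, NULL, &start, &length, &target_type, &params);

    /* The patched idea: never hand a NULL params to snprintf()/strcpy() */
    if (params &&
        snprintf(outparams, PARAMS_SIZE, "%s", params) < PARAMS_SIZE)
        r = 0;
out:
    dm_task_destroy(dmt);
    return r;
}

int main(int argc, char **argv)
{
    char params[PARAMS_SIZE];

    if (argc != 2) {
        fprintf(stderr, "usage: %s <dm-device-name>\n", argv[0]);
        return 2;
    }
    if (dm_get_first_params(argv[1], params)) {
        fprintf(stderr, "no table parameters for %s\n", argv[1]);
        return 1;
    }
    printf("%s\n", params);
    return 0;
}
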
@ -0,0 +1,26 @@
@@ -0,0 +1,26 @@

---
multipath/multipath.conf.5 | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)

Index: multipath-tools-130222/multipath/multipath.conf.5
===================================================================
--- multipath-tools-130222.orig/multipath/multipath.conf.5
+++ multipath-tools-130222/multipath/multipath.conf.5
@@ -213,7 +213,7 @@ Default value is \fBconst\fR.
.RE
.TP
.B prio_args
-Arguments to pass to to the prio function. This only applies to certain
+Arguments to pass to the prio function. This only applies to certain
prioritizers
.RS
.TP 12
@@ -228,7 +228,7 @@ regex can be of device name format Ex:
regex can be of the form
.I "host_wwnn:host_wwpn:target_wwnn:target_wwpn"
these values can be looked up through sysfs or by running
-.I mulitpathd show paths format "%N:%R:%n:%r"
+.I multipathd show paths format "%N:%R:%n:%r"
Ex: 0x200100e08ba0aea0:0x210100e08ba0aea0:.*:.* , .*:.*:iqn.2009-10.com.redhat.msp.lab.ask-06:.*
.TP
.B alua