diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..dafa321 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +/*.tar.* diff --git a/00001-rpath.patch b/00001-rpath.patch new file mode 100644 index 0000000..9fae54c --- /dev/null +++ b/00001-rpath.patch @@ -0,0 +1,19 @@ +diff -up Python-3.1.1/Lib/distutils/unixccompiler.py.rpath Python-3.1.1/Lib/distutils/unixccompiler.py +--- Python-3.1.1/Lib/distutils/unixccompiler.py.rpath 2009-09-04 17:29:34.000000000 -0400 ++++ Python-3.1.1/Lib/distutils/unixccompiler.py 2009-09-04 17:49:54.000000000 -0400 +@@ -141,6 +141,15 @@ class UnixCCompiler(CCompiler): + if sys.platform == "cygwin": + exe_extension = ".exe" + ++ def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs): ++ """Remove standard library path from rpath""" ++ libraries, library_dirs, runtime_library_dirs = super()._fix_lib_args( ++ libraries, library_dirs, runtime_library_dirs) ++ libdir = sysconfig.get_config_var('LIBDIR') ++ if runtime_library_dirs and (libdir in runtime_library_dirs): ++ runtime_library_dirs.remove(libdir) ++ return libraries, library_dirs, runtime_library_dirs ++ + def preprocess(self, source, output_file=None, macros=None, + include_dirs=None, extra_preargs=None, extra_postargs=None): + fixed_args = self._fix_compile_args(None, macros, include_dirs) diff --git a/00055-systemtap.patch b/00055-systemtap.patch new file mode 100644 index 0000000..a48fe7c --- /dev/null +++ b/00055-systemtap.patch @@ -0,0 +1,762 @@ +diff -up Python-3.3.0rc2/configure.ac.systemtap Python-3.3.0rc2/configure.ac +--- Python-3.3.0rc2/configure.ac.systemtap 2012-09-09 05:11:14.000000000 -0400 ++++ Python-3.3.0rc2/configure.ac 2012-09-10 09:17:21.114511781 -0400 +@@ -2678,6 +2678,23 @@ if test "$with_valgrind" != no; then + OPT="-DDYNAMIC_ANNOTATIONS_ENABLED=1 $OPT" + fi + ++# Check for systemtap support ++# On Linux, /usr/bin/dtrace is in fact a shim to SystemTap ++AC_MSG_CHECKING([for --with-systemtap]) ++AC_ARG_WITH([systemtap], ++ AC_HELP_STRING([--with(out)-systemtap], [disable/enable SystemTap support]),, ++ with_systemtap=no) ++AC_MSG_RESULT([$with_systemtap]) ++if test "$with_systemtap" != no; then ++ AC_DEFINE(WITH_SYSTEMTAP, 1, ++ [Define if you want to compile in SystemTap support]) ++ SYSTEMTAPOBJS="Python/pysystemtap.o" ++ SYSTEMTAPDEPS="\$(srcdir)/Python/pysystemtap.h" ++fi ++ ++AC_SUBST(SYSTEMTAPOBJS) ++AC_SUBST(SYSTEMTAPDEPS) ++ + # -I${DLINCLDIR} is added to the compile rule for importdl.o + AC_SUBST(DLINCLDIR) + DLINCLDIR=. +diff -up Python-3.3.0rc2/Doc/howto/index.rst.systemtap Python-3.3.0rc2/Doc/howto/index.rst +--- Python-3.3.0rc2/Doc/howto/index.rst.systemtap 2012-09-09 05:10:51.000000000 -0400 ++++ Python-3.3.0rc2/Doc/howto/index.rst 2012-09-10 09:17:21.117511779 -0400 +@@ -29,4 +29,5 @@ Currently, the HOWTOs are: + argparse.rst + ipaddress.rst + clinic.rst ++ instrumentation.rst + +diff -up Python-3.3.0rc2/Doc/howto/instrumentation.rst.systemtap Python-3.3.0rc2/Doc/howto/instrumentation.rst +--- Python-3.3.0rc2/Doc/howto/instrumentation.rst.systemtap 2012-09-10 09:17:21.117511779 -0400 ++++ Python-3.3.0rc2/Doc/howto/instrumentation.rst 2012-09-10 09:17:21.117511779 -0400 +@@ -0,0 +1,295 @@ ++.. _instrumentation: ++ ++==================================== ++Instrumenting CPython with SystemTap ++==================================== ++ ++:author: David Malcolm ++ ++DTrace and SystemTap are monitoring tools, each providing a way to inspect ++what the processes on a computer system are doing. 
They both use ++domain-specific languages allowing a user to write scripts which: ++ ++ - filter which processes are to be observed ++ - gather data from the processes of interest ++ - generate reports on the data ++ ++As of Python 3.3, CPython can be built with embedded "markers" that can be ++observed by a SystemTap script, making it easier to monitor what the CPython ++processes on a system are doing. ++ ++.. Potentially this document could be expanded to also cover DTrace markers. ++ However, I'm not a DTrace expert. ++ ++.. I'm using ".. code-block:: c" for SystemTap scripts, as "c" is syntactically ++ the closest match that Sphinx supports ++ ++ ++Enabling the static markers ++--------------------------- ++ ++In order to build CPython with the embedded markers for SystemTap, the ++SystemTap development tools must be installed. ++ ++On a Fedora or Red Hat Enterprise Linux machine, this can be done via:: ++ ++ yum install systemtap-sdt-devel ++ ++CPython must then be configured `--with-systemtap`:: ++ ++ checking for --with-systemtap... yes ++ ++You can verify if the SystemTap static markers are present in the built ++binary by seeing if it contains a ".note.stapsdt" section. ++ ++.. code-block:: bash ++ ++ $ eu-readelf -S ./python | grep .note.stapsdt ++ [29] .note.stapsdt NOTE 0000000000000000 00308d78 000000b8 0 0 0 4 ++ ++If you've built python as a shared library (with --enable-shared), you need ++to look instead within the shared library. For example: ++ ++.. code-block:: bash ++ ++ $ eu-readelf -S libpython3.3dm.so.1.0 | grep .note.stapsdt ++ [28] .note.stapsdt NOTE 0000000000000000 00365b68 000000b8 0 0 0 4 ++ ++Earlier versions of SystemTap stored the markers in a ".probes" section. ++ ++For the curious, you can see the metadata for the static markers using this ++invocation. ++ ++.. code-block:: bash ++ ++ $ eu-readelf -x .note.stapsdt ./python ++ ++ Hex dump of section [29] '.note.stapsdt', 184 bytes at offset 0x308d78: ++ 0x00000000 08000000 45000000 03000000 73746170 ....E.......stap ++ 0x00000010 73647400 d4664b00 00000000 4fc36600 sdt..fK.....O.f. ++ 0x00000020 00000000 488d9000 00000000 70797468 ....H.......pyth ++ 0x00000030 6f6e0066 756e6374 696f6e5f 5f656e74 on.function__ent ++ 0x00000040 72790038 40257261 78203840 25726478 ry.8@%rax 8@%rdx ++ 0x00000050 202d3440 25656378 00000000 08000000 -4@%ecx........ ++ 0x00000060 46000000 03000000 73746170 73647400 F.......stapsdt. ++ 0x00000070 0d674b00 00000000 4fc36600 00000000 .gK.....O.f..... ++ 0x00000080 4a8d9000 00000000 70797468 6f6e0066 J.......python.f ++ 0x00000090 756e6374 696f6e5f 5f726574 75726e00 unction__return. ++ 0x000000a0 38402572 61782038 40257264 78202d34 8@%rax 8@%rdx -4 ++ 0x000000b0 40256563 78000000 @%ecx... ++ ++and a sufficiently modern eu-readelf can print the metadata: ++ ++.. 
code-block:: bash ++ ++ $ eu-readelf -n ./python ++ ++ Note section [ 1] '.note.gnu.build-id' of 36 bytes at offset 0x190: ++ Owner Data size Type ++ GNU 20 GNU_BUILD_ID ++ Build ID: a28f8db1b224530b0d38ad7b82a249cf7c3f18d6 ++ ++ Note section [27] '.note.stapsdt' of 184 bytes at offset 0x1ae884: ++ Owner Data size Type ++ stapsdt 70 Version: 3 ++ PC: 0xe0d3a, Base: 0x14b150, Semaphore: 0x3ae882 ++ Provider: python, Name: function__return, Args: '8@%rbx 8@%r13 -4@%eax' ++ stapsdt 69 Version: 3 ++ PC: 0xe0f37, Base: 0x14b150, Semaphore: 0x3ae880 ++ Provider: python, Name: function__entry, Args: '8@%rbx 8@%r13 -4@%eax' ++ ++The above metadata contains information for SystemTap describing how it can ++patch strategically-placed machine code instructions to enable the tracing ++hooks used by a SystemTap script. ++ ++ ++Static markers ++-------------- ++ ++The low-level way to use the SystemTap integration is to use the static ++markers directly. This requires you to explicitly state the binary file ++containing them. ++ ++For example, this script can be used to show the call/return hierarchy of a ++Python script: ++ ++.. code-block:: c ++ ++ probe process('python').mark("function__entry") { ++ filename = user_string($arg1); ++ funcname = user_string($arg2); ++ lineno = $arg3; ++ ++ printf("%s => %s in %s:%d\\n", ++ thread_indent(1), funcname, filename, lineno); ++ } ++ ++ probe process('python').mark("function__return") { ++ filename = user_string($arg1); ++ funcname = user_string($arg2); ++ lineno = $arg3; ++ ++ printf("%s <= %s in %s:%d\\n", ++ thread_indent(-1), funcname, filename, lineno); ++ } ++ ++It can be invoked like this: ++ ++.. code-block:: bash ++ ++ $ stap \ ++ show-call-hierarchy.stp \ ++ -c ./python test.py ++ ++The output looks like this:: ++ ++ 11408 python(8274): => __contains__ in Lib/_abcoll.py:362 ++ 11414 python(8274): => __getitem__ in Lib/os.py:425 ++ 11418 python(8274): => encode in Lib/os.py:490 ++ 11424 python(8274): <= encode in Lib/os.py:493 ++ 11428 python(8274): <= __getitem__ in Lib/os.py:426 ++ 11433 python(8274): <= __contains__ in Lib/_abcoll.py:366 ++ ++where the columns are: ++ ++ - time in microseconds since start of script ++ ++ - name of executable ++ ++ - PID of process ++ ++and the remainder indicates the call/return hierarchy as the script executes. ++ ++For a `--enable-shared` build of CPython, the markers are contained within the ++libpython shared library, and the probe's dotted path needs to reflect this. For ++example, this line from the above example:: ++ ++ probe process('python').mark("function__entry") { ++ ++should instead read:: ++ ++ probe process('python').library("libpython3.3dm.so.1.0").mark("function__entry") { ++ ++(assuming a debug build of CPython 3.3) ++ ++.. I'm reusing the "c:function" type for markers ++ ++.. c:function:: function__entry(str filename, str funcname, int lineno) ++ ++ This marker indicates that execution of a Python function has begun. It is ++ only triggered for pure-python (bytecode) functions. ++ ++ The filename, function name, and line number are provided back to the ++ tracing script as positional arguments, which must be accessed using ++ `$arg1`, `$arg2`: ++ ++ * `$arg1` : `(const char *)` filename, accessible using `user_string($arg1)` ++ ++ * `$arg2` : `(const char *)` function name, accessible using ++ `user_string($arg2)` ++ ++ * `$arg3` : `int` line number ++ ++ * `$arg4` : `(PyFrameObject *)`, the frame being executed ++ ++.. 
c:function:: function__return(str filename, str funcname, int lineno)
++
++   This marker is the converse of `function__entry`, and indicates that
++   execution of a Python function has ended (either via ``return``, or via an
++   exception). It is only triggered for pure-python (bytecode) functions.
++
++   The arguments are the same as for `function__entry`
++
++
++Tapsets
++-------
++
++The higher-level way to use the SystemTap integration is to use a "tapset":
++SystemTap's equivalent of a library, which hides some of the lower-level
++details of the static markers.
++
++Here is a tapset file, based on a non-shared build of CPython:
++
++.. code-block:: c
++
++   /*
++      Provide a higher-level wrapping around the function__entry and
++      function__return markers:
++    */
++   probe python.function.entry = process("python").mark("function__entry")
++   {
++       filename = user_string($arg1);
++       funcname = user_string($arg2);
++       lineno = $arg3;
++       frameptr = $arg4
++   }
++   probe python.function.return = process("python").mark("function__return")
++   {
++       filename = user_string($arg1);
++       funcname = user_string($arg2);
++       lineno = $arg3;
++       frameptr = $arg4
++   }
++
++If this file is installed in SystemTap's tapset directory (e.g.
++`/usr/share/systemtap/tapset`), then these additional probepoints become
++available:
++
++.. c:function:: python.function.entry(str filename, str funcname, int lineno, frameptr)
++
++   This probe point indicates that execution of a Python function has begun.
++   It is only triggered for pure-python (bytecode) functions.
++
++.. c:function:: python.function.return(str filename, str funcname, int lineno, frameptr)
++
++   This probe point is the converse of `python.function.entry`, and indicates
++   that execution of a Python function has ended (either via ``return``, or
++   via an exception). It is only triggered for pure-python (bytecode) functions.
++
++
++Examples
++--------
++This SystemTap script uses the tapset above to more cleanly implement the
++example given above of tracing the Python function-call hierarchy, without
++needing to directly name the static markers:
++
++.. code-block:: c
++
++   probe python.function.entry
++   {
++     printf("%s => %s in %s:%d\n",
++            thread_indent(1), funcname, filename, lineno);
++   }
++
++   probe python.function.return
++   {
++     printf("%s <= %s in %s:%d\n",
++            thread_indent(-1), funcname, filename, lineno);
++   }
++
++
++The following script uses the tapset above to provide a top-like view of all
++running CPython code, showing the top 20 most frequently-entered bytecode
++frames, each second, across the whole system:
++
++..
code-block:: c ++ ++ global fn_calls; ++ ++ probe python.function.entry ++ { ++ fn_calls[pid(), filename, funcname, lineno] += 1; ++ } ++ ++ probe timer.ms(1000) { ++ printf("\033[2J\033[1;1H") /* clear screen */ ++ printf("%6s %80s %6s %30s %6s\n", ++ "PID", "FILENAME", "LINE", "FUNCTION", "CALLS") ++ foreach ([pid, filename, funcname, lineno] in fn_calls- limit 20) { ++ printf("%6d %80s %6d %30s %6d\n", ++ pid, filename, lineno, funcname, ++ fn_calls[pid, filename, funcname, lineno]); ++ } ++ delete fn_calls; ++ } ++ +diff -up Python-3.3.0rc2/Lib/test/test_systemtap.py.systemtap Python-3.3.0rc2/Lib/test/test_systemtap.py +--- Python-3.3.0rc2/Lib/test/test_systemtap.py.systemtap 2012-09-10 09:17:21.117511779 -0400 ++++ Python-3.3.0rc2/Lib/test/test_systemtap.py 2012-09-10 09:17:21.117511779 -0400 +@@ -0,0 +1,234 @@ ++# Verify that systemtap static probes work ++# ++import subprocess ++import sys ++import sysconfig ++import os ++import unittest ++ ++from test.support import run_unittest, TESTFN, unlink ++ ++if '--with-systemtap' not in sysconfig.get_config_var('CONFIG_ARGS'): ++ raise unittest.SkipTest("Python was not configured --with-systemtap") ++ ++try: ++ _, stap_version = subprocess.Popen(["stap", "-V"], ++ stdout=subprocess.PIPE, ++ stderr=subprocess.PIPE, ++ ).communicate() ++except OSError: ++ # This is what "no stap" looks like. There may, however, be other ++ # errors that manifest this way too. ++ raise unittest.SkipTest("Couldn't find stap on the path") ++ ++def invoke_systemtap_script(script, cmd): ++ # Start a child process, probing with the given systemtap script ++ # (passed as stdin to the "stap" tool) ++ # The script should be a bytes instance ++ # Return (stdout, stderr) pair ++ ++ p = subprocess.Popen(["stap", "-", '-vv', '-c', cmd], ++ stdin=subprocess.PIPE, ++ stdout=subprocess.PIPE, ++ stderr=subprocess.PIPE) ++ out, err = p.communicate(input=script) ++ return out, err ++ ++# Verify that stap can run a simple "hello world"-style script ++# This can fail for various reasons: ++# - missing kernel headers ++# - permissions (a non-root user needs to be in the "stapdev" group) ++TRIVIAL_STAP_SCRIPT = b'probe begin { println("hello world") exit () }' ++ ++out, err = invoke_systemtap_script(TRIVIAL_STAP_SCRIPT, 'true') ++if out != b'hello world\n': ++ raise unittest.SkipTest("Test systemtap script did not run; stderr was: %s" % err) ++ ++# We don't expect stderr to be empty, since we're invoking stap with "-vv": stap ++# will (we hope) generate debugging output on stderr. 
++ ++def invoke_python_under_systemtap(script, pythoncode=None, pythonfile=None): ++ # Start a child python process, probing with the given systemtap script ++ # (passed as stdin to the "stap" tool) ++ # The script should be a bytes instance ++ # Return (stdout, stderr) pair ++ ++ if pythonfile: ++ pythoncmd = '%s %s' % (sys.executable, pythonfile) ++ else: ++ pythoncmd = '%s -c %r' % (sys.executable, pythoncode) ++ ++ # The process tree of a stap invocation of a command goes through ++ # something like this: ++ # stap ->fork/exec(staprun; exec stapio ->f/e(-c cmd); exec staprun -r) ++ # and this trip through setuid leads to LD_LIBRARY_PATH being dropped, ++ # which would lead to an --enable-shared build of python failing to be ++ # find its libpython, with an error like: ++ # error while loading shared libraries: libpython3.3dm.so.1.0: cannot ++ # open shared object file: No such file or directory ++ # Hence we need to jump through some hoops to expose LD_LIBRARY_PATH to ++ # the invoked python process: ++ LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '') ++ if LD_LIBRARY_PATH: ++ pythoncmd = 'env LD_LIBRARY_PATH=%s ' % LD_LIBRARY_PATH + pythoncmd ++ ++ return invoke_systemtap_script(script, pythoncmd) ++ ++# When using the static markers, we need to supply the prefix of a systemtap ++# dotted probe point that containing the marker. ++# See http://sourceware.org/systemtap/langref/Probe_points.html ++# ++# We need to determine if this is a shared-library build ++# ++# Note that sysconfig can get this wrong; see: ++# http://bugs.python.org/issue14774 ++# ++if '--enable-shared' in sysconfig.get_config_var('CONFIG_ARGS'): ++ # For a shared-library build, the markers are in library(INSTSONAME): ++ INSTSONAME = sysconfig.get_config_var('INSTSONAME') ++ probe_prefix = 'process("%s").library("%s")' % (sys.executable, INSTSONAME) ++else: ++ # For a non-shared-library build, we can simply use sys.executable: ++ probe_prefix = 'process("%s")' % sys.executable ++ ++# The following script ought to generate lots of lines showing recursive ++# function entry and return, of the form: ++# 11408 python(8274): => __contains__ in Lib/_abcoll.py:362 ++# 11414 python(8274): => __getitem__ in Lib/os.py:425 ++# 11418 python(8274): => encode in Lib/os.py:490 ++# 11424 python(8274): <= encode in Lib/os.py:493 ++# 11428 python(8274): <= __getitem__ in Lib/os.py:426 ++# 11433 python(8274): <= __contains__ in Lib/_abcoll.py:366 ++# where the column are: ++# - time in microseconds since start of script ++# - name of executable ++# - PID of process ++# and the remainder indicates the call/return hierarchy ++ ++hierarchy_script = (''' ++probe %s.mark("function__entry") { ++ filename = user_string($arg1); ++ funcname = user_string($arg2); ++ lineno = $arg3; ++ ++ printf("%%s => %%s in %%s:%%d\\n", thread_indent(1), funcname, filename, lineno); ++} ++ ++probe %s.mark("function__return") { ++ filename = user_string($arg1); ++ funcname = user_string($arg2); ++ lineno = $arg3; ++ ++ printf("%%s <= %%s in %%s:%%d\\n", thread_indent(-1), funcname, filename, lineno); ++} ++''' % (probe_prefix, probe_prefix)).encode('utf-8') ++ ++ ++class ErrorDumper: ++ # A context manager that dumps extra information if an exception is raised, ++ # to help track down why the problem occurred ++ def __init__(self, out, err): ++ self.out = out ++ self.err = err ++ ++ def __enter__(self): ++ pass ++ ++ def __exit__(self, type_, value, traceback): ++ if type_: ++ # an exception is being raised: ++ print('stdout: %s' % out.decode()) ++ 
print('stderr: %s' % err.decode()) ++ ++class SystemtapTests(unittest.TestCase): ++ ++ def test_invoking_python(self): ++ # Ensure that we can invoke python under stap, with a trivial stap ++ # script: ++ out, err = invoke_python_under_systemtap( ++ b'probe begin { println("hello from stap") exit () }', ++ pythoncode="print('hello from python')") ++ with ErrorDumper(out, err): ++ self.assertIn(b'hello from stap', out) ++ self.assertIn(b'hello from python', out) ++ ++ def test_function_entry(self): ++ # Ensure that the function_entry static marker works ++ out, err = invoke_python_under_systemtap(hierarchy_script) ++ # stdout ought to contain various lines showing recursive function ++ # entry and return (see above) ++ ++ # Uncomment this for debugging purposes: ++ # print(out.decode('utf-8')) ++ ++ # Executing the cmdline-supplied "pass": ++ # 0 python(8274): => in :1 ++ # 5 python(8274): <= in :1 ++ with ErrorDumper(out, err): ++ self.assertIn(b'=> in :1', out, ++ msg="stdout: %s\nstderr: %s\n" % (out, err)) ++ ++ def test_function_encoding(self): ++ # Ensure that function names containing non-Latin 1 code ++ # points are handled: ++ pythonfile = TESTFN ++ try: ++ unlink(pythonfile) ++ f = open(pythonfile, "wb") ++ f.write(""" ++# Sample script with non-ASCII filename, for use by test_systemtap.py ++# Implicitly UTF-8 ++ ++def 文字化け(): ++ '''Function with non-ASCII identifier; I believe this reads "mojibake"''' ++ print("hello world!") ++ ++文字化け() ++""".encode('utf-8')) ++ f.close() ++ ++ out, err = invoke_python_under_systemtap(hierarchy_script, ++ pythonfile=pythonfile) ++ out_utf8 = out.decode('utf-8') ++ with ErrorDumper(out, err): ++ self.assertIn('=> in %s:5' % pythonfile, out_utf8) ++ self.assertIn(' => 文字化け in %s:5' % pythonfile, out_utf8) ++ self.assertIn(' <= 文字化け in %s:7' % pythonfile, out_utf8) ++ self.assertIn('<= in %s:9' % pythonfile, out_utf8) ++ finally: ++ unlink(pythonfile) ++ ++ @unittest.skipIf(sys.getfilesystemencoding() == 'ascii', ++ 'the test filename is not encodable with ASCII') ++ def test_filename_encoding(self): ++ # Ensure that scripts names containing non-Latin 1 code ++ # points are handled: ++ pythonfile = TESTFN + '_☠.py' ++ try: ++ unlink(pythonfile) ++ f = open(pythonfile, "wb") ++ f.write(""" ++def foo(): ++ '''Function with non-ASCII identifier; I believe this reads "mojibake"''' ++ print("hello world!") ++ ++foo() ++""".encode('utf-8')) ++ f.close() ++ ++ out, err = invoke_python_under_systemtap(hierarchy_script, ++ pythonfile=pythonfile) ++ out_utf8 = out.decode('utf-8') ++ with ErrorDumper(out, err): ++ self.assertIn('=> in %s:2' % pythonfile, out_utf8) ++ self.assertIn(' => foo in %s:2' % pythonfile, out_utf8) ++ self.assertIn(' <= foo in %s:4' % pythonfile, out_utf8) ++ self.assertIn('<= in %s:6' % pythonfile, out_utf8) ++ finally: ++ unlink(pythonfile) ++ ++def test_main(): ++ run_unittest(SystemtapTests) ++ ++if __name__ == "__main__": ++ test_main() +diff -up Python-3.3.0rc2/Makefile.pre.in.systemtap Python-3.3.0rc2/Makefile.pre.in +--- Python-3.3.0rc2/Makefile.pre.in.systemtap 2012-09-09 05:11:05.000000000 -0400 ++++ Python-3.3.0rc2/Makefile.pre.in 2012-09-10 09:19:51.195501518 -0400 +@@ -363,6 +363,7 @@ PYTHON_OBJS= \ + Python/formatter_unicode.o \ + Python/fileutils.o \ + Python/$(DYNLOADFILE) \ ++ @SYSTEMTAPOBJS@ \ + $(LIBOBJS) \ + $(MACHDEP_OBJS) \ + $(THREADOBJ) +@@ -713,7 +714,8 @@ Objects/setobject.o: $(srcdir)/Objects/s + $(OPCODETARGETS_H): $(OPCODETARGETGEN_FILES) + $(OPCODETARGETGEN) $(OPCODETARGETS_H) + +-Python/ceval.o: 
$(OPCODETARGETS_H) $(srcdir)/Python/ceval_gil.h ++Python/ceval.o: $(OPCODETARGETS_H) $(srcdir)/Python/ceval_gil.h \ ++ $(srcdir)/Python/ceval_systemtap.h @SYSTEMTAPDEPS@ + + Python/frozen.o: Python/importlib.h Python/importlib_external.h + +@@ -724,6 +726,13 @@ Objects/typeobject.o: $(srcdir)/Objects/ + Objects/typeslots.inc: $(srcdir)/Include/typeslots.h $(srcdir)/Objects/typeslots.py + $(PYTHON) $(srcdir)/Objects/typeslots.py < $(srcdir)/Include/typeslots.h > Objects/typeslots.inc + ++# Only needed with --with-systemtap; not a public header: ++$(srcdir)/Python/pysystemtap.h: $(srcdir)/Python/pysystemtap.d ++ dtrace -o $@ $(DFLAGS) -C -h -s $(srcdir)/Python/pysystemtap.d ++ ++Python/pysystemtap.o: $(srcdir)/Python/pysystemtap.d Python/ceval.o ++ dtrace -o $@ $(DFLAGS) -C -G -s $(srcdir)/Python/pysystemtap.d Python/ceval.o ++ + ############################################################################ + # Header files + +@@ -1345,6 +1354,7 @@ clean: pycremoval + -rm -f Lib/lib2to3/*Grammar*.pickle + -rm -f Programs/_testembed Programs/_freeze_importlib + -rm -rf build ++ -rm -f $(srcdir)/Python/pysystemtap.h + + profile-removal: + find . -name '*.gc??' -exec rm -f {} ';' +diff -up Python-3.3.0rc2/pyconfig.h.in.systemtap Python-3.3.0rc2/pyconfig.h.in +--- Python-3.3.0rc2/pyconfig.h.in.systemtap 2012-09-09 05:11:14.000000000 -0400 ++++ Python-3.3.0rc2/pyconfig.h.in 2012-09-10 09:17:21.120511781 -0400 +@@ -1306,6 +1306,9 @@ + /* Define if you want to compile in Python-specific mallocs */ + #undef WITH_PYMALLOC + ++/* Define if you want to compile in SystemTap support */ ++#undef WITH_SYSTEMTAP ++ + /* Define if you want to compile in rudimentary thread support */ + #undef WITH_THREAD + +diff -up Python-3.3.0rc2/Python/ceval.c.systemtap Python-3.3.0rc2/Python/ceval.c +--- Python-3.3.0rc2/Python/ceval.c.systemtap 2012-09-09 05:11:12.000000000 -0400 ++++ Python-3.3.0rc2/Python/ceval.c 2012-09-10 09:17:21.122511781 -0400 +@@ -18,6 +18,8 @@ + + #include + ++#include "ceval_systemtap.h" ++ + #ifndef WITH_TSC + + #define READ_TIMESTAMP(var) +@@ -1160,6 +1162,10 @@ PyEval_EvalFrameEx(PyFrameObject *f, int + } + } + ++ if (PYTHON_FUNCTION_ENTRY_ENABLED()) { ++ systemtap_function_entry(f); ++ } ++ + co = f->f_code; + names = co->co_names; + consts = co->co_consts; +@@ -3077,6 +3083,11 @@ fast_yield: + + /* pop frame */ + exit_eval_frame: ++ ++ if (PYTHON_FUNCTION_RETURN_ENABLED()) { ++ systemtap_function_return(f); ++ } ++ + Py_LeaveRecursiveCall(); + f->f_executing = 0; + tstate->frame = f->f_back; +diff -up Python-3.3.0rc2/Python/ceval_systemtap.h.systemtap Python-3.3.0rc2/Python/ceval_systemtap.h +--- Python-3.3.0rc2/Python/ceval_systemtap.h.systemtap 2012-09-10 09:17:21.122511781 -0400 ++++ Python-3.3.0rc2/Python/ceval_systemtap.h 2012-09-10 09:17:21.122511781 -0400 +@@ -0,0 +1,86 @@ ++/* ++ Support for SystemTap static markers ++*/ ++ ++#ifdef WITH_SYSTEMTAP ++ ++#include "pysystemtap.h" ++ ++/* ++ A struct to hold all of the information gathered when one of the traceable ++ markers is triggered ++*/ ++struct frame_marker_info ++{ ++ PyObject *filename_obj; ++ PyObject *funcname_obj; ++ const char *filename; ++ const char *funcname; ++ int lineno; ++}; ++ ++static void ++get_frame_marker_info(PyFrameObject *f, struct frame_marker_info *fmi) ++{ ++ PyObject *ptype; ++ PyObject *pvalue; ++ PyObject *ptraceback; ++ ++ PyErr_Fetch(&ptype, &pvalue, &ptraceback); ++ ++ fmi->filename_obj = PyUnicode_EncodeFSDefault(f->f_code->co_filename); ++ if (fmi->filename_obj) { ++ fmi->filename = 
PyBytes_AsString(fmi->filename_obj); ++ } else { ++ fmi->filename = NULL; ++ } ++ ++ fmi->funcname_obj = PyUnicode_AsUTF8String(f->f_code->co_name); ++ if (fmi->funcname_obj) { ++ fmi->funcname = PyBytes_AsString(fmi->funcname_obj); ++ } else { ++ fmi->funcname = NULL; ++ } ++ ++ fmi->lineno = PyCode_Addr2Line(f->f_code, f->f_lasti); ++ ++ PyErr_Restore(ptype, pvalue, ptraceback); ++ ++} ++ ++static void ++release_frame_marker_info(struct frame_marker_info *fmi) ++{ ++ Py_XDECREF(fmi->filename_obj); ++ Py_XDECREF(fmi->funcname_obj); ++} ++ ++static void ++systemtap_function_entry(PyFrameObject *f) ++{ ++ struct frame_marker_info fmi; ++ get_frame_marker_info(f, &fmi); ++ PYTHON_FUNCTION_ENTRY(fmi.filename, fmi.funcname, fmi.lineno, f); ++ release_frame_marker_info(&fmi); ++} ++ ++static void ++systemtap_function_return(PyFrameObject *f) ++{ ++ struct frame_marker_info fmi; ++ get_frame_marker_info(f, &fmi); ++ PYTHON_FUNCTION_RETURN(fmi.filename, fmi.funcname, fmi.lineno, f); ++ release_frame_marker_info(&fmi); ++} ++ ++#else /* #ifdef WITH_SYSTEMTAP */ ++ ++/* ++ When configured --without-systemtap, everything compiles away to nothing: ++*/ ++#define PYTHON_FUNCTION_ENTRY_ENABLED() 0 ++#define PYTHON_FUNCTION_RETURN_ENABLED() 0 ++#define systemtap_function_entry(f) ++#define systemtap_function_return(f) ++ ++#endif +diff -up Python-3.3.0rc2/Python/pysystemtap.d.systemtap Python-3.3.0rc2/Python/pysystemtap.d +--- Python-3.3.0rc2/Python/pysystemtap.d.systemtap 2012-09-10 09:17:21.122511781 -0400 ++++ Python-3.3.0rc2/Python/pysystemtap.d 2012-09-10 09:17:21.122511781 -0400 +@@ -0,0 +1,4 @@ ++provider python { ++ probe function__entry(const char *, const char *, int, PyFrameObject *); ++ probe function__return(const char *, const char *, int, PyFrameObject *); ++}; diff --git a/00102-lib64.patch b/00102-lib64.patch new file mode 100644 index 0000000..61f9b52 --- /dev/null +++ b/00102-lib64.patch @@ -0,0 +1,203 @@ +diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py +index 0258d3d..4b969bf 100644 +--- a/Lib/distutils/command/install.py ++++ b/Lib/distutils/command/install.py +@@ -30,14 +30,14 @@ WINDOWS_SCHEME = { + INSTALL_SCHEMES = { + 'unix_prefix': { + 'purelib': '$base/lib/python$py_version_short/site-packages', +- 'platlib': '$platbase/lib/python$py_version_short/site-packages', ++ 'platlib': '$platbase/lib64/python$py_version_short/site-packages', + 'headers': '$base/include/python$py_version_short$abiflags/$dist_name', + 'scripts': '$base/bin', + 'data' : '$base', + }, + 'unix_home': { + 'purelib': '$base/lib/python', +- 'platlib': '$base/lib/python', ++ 'platlib': '$base/lib64/python', + 'headers': '$base/include/python/$dist_name', + 'scripts': '$base/bin', + 'data' : '$base', +diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py +index e07a6c8..554740d 100644 +--- a/Lib/distutils/sysconfig.py ++++ b/Lib/distutils/sysconfig.py +@@ -129,8 +129,12 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): + prefix = plat_specific and EXEC_PREFIX or PREFIX + + if os.name == "posix": ++ if plat_specific or standard_lib: ++ lib = "lib64" ++ else: ++ lib = "lib" + libpython = os.path.join(prefix, +- "lib", "python" + get_python_version()) ++ lib, "python" + get_python_version()) + if standard_lib: + return libpython + else: +diff --git a/Lib/distutils/tests/test_install.py b/Lib/distutils/tests/test_install.py +index 287ab19..d4c05e0 100644 +--- a/Lib/distutils/tests/test_install.py ++++ b/Lib/distutils/tests/test_install.py +@@ -57,8 
+57,9 @@ class InstallTestCase(support.TempdirManager, + self.assertEqual(got, expected) + + libdir = os.path.join(destination, "lib", "python") ++ platlibdir = os.path.join(destination, "lib64", "python") + check_path(cmd.install_lib, libdir) +- check_path(cmd.install_platlib, libdir) ++ check_path(cmd.install_platlib, platlibdir) + check_path(cmd.install_purelib, libdir) + check_path(cmd.install_headers, + os.path.join(destination, "include", "python", "foopkg")) +diff --git a/Lib/site.py b/Lib/site.py +index 7dc1b04..85016b4 100644 +--- a/Lib/site.py ++++ b/Lib/site.py +@@ -334,11 +334,15 @@ def getsitepackages(prefixes=None): + seen.add(prefix) + + if os.sep == '/': ++ sitepackages.append(os.path.join(prefix, "lib64", ++ "python" + sys.version[:3], ++ "site-packages")) + sitepackages.append(os.path.join(prefix, "lib", + "python%d.%d" % sys.version_info[:2], + "site-packages")) + else: + sitepackages.append(prefix) ++ sitepackages.append(os.path.join(prefix, "lib64", "site-packages")) + sitepackages.append(os.path.join(prefix, "lib", "site-packages")) + # for framework builds *only* we add the standard Apple locations. + if sys.platform == "darwin" and sys._framework: +diff --git a/Lib/sysconfig.py b/Lib/sysconfig.py +index 9ee4d31..53c8606 100644 +--- a/Lib/sysconfig.py ++++ b/Lib/sysconfig.py +@@ -20,10 +20,10 @@ __all__ = [ + + _INSTALL_SCHEMES = { + 'posix_prefix': { +- 'stdlib': '{installed_base}/lib/python{py_version_short}', +- 'platstdlib': '{platbase}/lib/python{py_version_short}', ++ 'stdlib': '{installed_base}/lib64/python{py_version_short}', ++ 'platstdlib': '{platbase}/lib64/python{py_version_short}', + 'purelib': '{base}/lib/python{py_version_short}/site-packages', +- 'platlib': '{platbase}/lib/python{py_version_short}/site-packages', ++ 'platlib': '{platbase}/lib64/python{py_version_short}/site-packages', + 'include': + '{installed_base}/include/python{py_version_short}{abiflags}', + 'platinclude': +@@ -62,10 +62,10 @@ _INSTALL_SCHEMES = { + 'data': '{userbase}', + }, + 'posix_user': { +- 'stdlib': '{userbase}/lib/python{py_version_short}', +- 'platstdlib': '{userbase}/lib/python{py_version_short}', ++ 'stdlib': '{userbase}/lib64/python{py_version_short}', ++ 'platstdlib': '{userbase}/lib64/python{py_version_short}', + 'purelib': '{userbase}/lib/python{py_version_short}/site-packages', +- 'platlib': '{userbase}/lib/python{py_version_short}/site-packages', ++ 'platlib': '{userbase}/lib64/python{py_version_short}/site-packages', + 'include': '{userbase}/include/python{py_version_short}', + 'scripts': '{userbase}/bin', + 'data': '{userbase}', +diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py +index 99e7b4f..c4c98a6 100644 +--- a/Lib/test/test_site.py ++++ b/Lib/test/test_site.py +@@ -275,8 +275,8 @@ class HelperFunctionsTests(unittest.TestCase): + self.assertEqual(dirs[1], wanted) + elif os.sep == '/': + # OS X non-framwework builds, Linux, FreeBSD, etc +- self.assertEqual(len(dirs), 1) +- wanted = os.path.join('xoxo', 'lib', ++ self.assertEqual(len(dirs), 2) ++ wanted = os.path.join('xoxo', 'lib64', + 'python%d.%d' % sys.version_info[:2], + 'site-packages') + self.assertEqual(dirs[0], wanted) +diff --git a/Makefile.pre.in b/Makefile.pre.in +index e8df8f7..a5a9d5e 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -133,7 +133,7 @@ LIBDIR= @libdir@ + MANDIR= @mandir@ + INCLUDEDIR= @includedir@ + CONFINCLUDEDIR= $(exec_prefix)/include +-SCRIPTDIR= $(prefix)/lib ++SCRIPTDIR= $(prefix)/lib64 + ABIFLAGS= @ABIFLAGS@ + + # Detailed destination directories +diff 
--git a/Modules/getpath.c b/Modules/getpath.c +index dd3387a..1258fcd 100644 +--- a/Modules/getpath.c ++++ b/Modules/getpath.c +@@ -494,7 +494,7 @@ calculate_path(void) + _pythonpath = Py_DecodeLocale(PYTHONPATH, NULL); + _prefix = Py_DecodeLocale(PREFIX, NULL); + _exec_prefix = Py_DecodeLocale(EXEC_PREFIX, NULL); +- lib_python = Py_DecodeLocale("lib/python" VERSION, NULL); ++ lib_python = Py_DecodeLocale("lib64/python" VERSION, NULL); + + if (!_pythonpath || !_prefix || !_exec_prefix || !lib_python) { + Py_FatalError( +@@ -683,7 +683,7 @@ calculate_path(void) + } + else + wcsncpy(zip_path, _prefix, MAXPATHLEN); +- joinpath(zip_path, L"lib/python00.zip"); ++ joinpath(zip_path, L"lib64/python00.zip"); + bufsz = wcslen(zip_path); /* Replace "00" with version */ + zip_path[bufsz - 6] = VERSION[0]; + zip_path[bufsz - 5] = VERSION[2]; +@@ -695,7 +695,7 @@ calculate_path(void) + fprintf(stderr, + "Could not find platform dependent libraries \n"); + wcsncpy(exec_prefix, _exec_prefix, MAXPATHLEN); +- joinpath(exec_prefix, L"lib/lib-dynload"); ++ joinpath(exec_prefix, L"lib64/lib-dynload"); + } + /* If we found EXEC_PREFIX do *not* reduce it! (Yet.) */ + +diff --git a/setup.py b/setup.py +index 11c4ec6..c3e5512 100644 +--- a/setup.py ++++ b/setup.py +@@ -513,7 +513,7 @@ class PyBuildExt(build_ext): + # directories (i.e. '.' and 'Include') must be first. See issue + # 10520. + if not cross_compiling: +- add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') ++ add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib64') + add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') + # only change this for cross builds for 3.3, issues on Mageia + if cross_compiling: +@@ -809,11 +809,11 @@ class PyBuildExt(build_ext): + elif curses_library: + readline_libs.append(curses_library) + elif self.compiler.find_library_file(lib_dirs + +- ['/usr/lib/termcap'], ++ ['/usr/lib64/termcap'], + 'termcap'): + readline_libs.append('termcap') + exts.append( Extension('readline', ['readline.c'], +- library_dirs=['/usr/lib/termcap'], ++ library_dirs=['/usr/lib64/termcap'], + extra_link_args=readline_extra_link_args, + libraries=readline_libs) ) + else: +@@ -850,8 +850,8 @@ class PyBuildExt(build_ext): + if krb5_h: + ssl_incs += krb5_h + ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, +- ['/usr/local/ssl/lib', +- '/usr/contrib/ssl/lib/' ++ ['/usr/local/ssl/lib64', ++ '/usr/contrib/ssl/lib64/' + ] ) + + if (ssl_incs is not None and diff --git a/00111-no-static-lib.patch b/00111-no-static-lib.patch new file mode 100644 index 0000000..50cc13a --- /dev/null +++ b/00111-no-static-lib.patch @@ -0,0 +1,53 @@ +diff --git a/Makefile.pre.in b/Makefile.pre.in +index a5a9d5e..51e8132 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -530,7 +530,7 @@ clinic: $(BUILDPYTHON) $(srcdir)/Modules/_blake2/blake2s_impl.c + $(RUNSHARED) $(PYTHON_FOR_BUILD) $(srcdir)/Tools/clinic/clinic.py --make --srcdir $(srcdir) + + # Build the interpreter +-$(BUILDPYTHON): Programs/python.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY) ++$(BUILDPYTHON): Programs/python.o $(LDLIBRARY) $(PY3LIBRARY) + $(LINKCC) $(PY_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/python.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST) + + platform: $(BUILDPYTHON) pybuilddir.txt +@@ -574,12 +574,6 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o + _TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build + +- +-# Build static library +-$(LIBRARY): $(LIBRARY_OBJS) +- -rm -f $@ 
+- $(AR) $(ARFLAGS) $@ $(LIBRARY_OBJS) +- + libpython$(LDVERSION).so: $(LIBRARY_OBJS) + if test $(INSTSONAME) != $(LDLIBRARY); then \ + $(BLDSHARED) -Wl,-h$(INSTSONAME) -o $(INSTSONAME) $(LIBRARY_OBJS) $(MODLIBS) $(SHLIBS) $(LIBC) $(LIBM) $(LDLAST); \ +@@ -667,7 +661,7 @@ Modules/Setup: $(srcdir)/Modules/Setup.dist + echo "-----------------------------------------------"; \ + fi + +-Programs/_testembed: Programs/_testembed.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY) ++Programs/_testembed: Programs/_testembed.o $(LDLIBRARY) $(PY3LIBRARY) + $(LINKCC) $(PY_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/_testembed.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST) + + ############################################################################ +@@ -1408,17 +1402,6 @@ libainstall: @DEF_MAKE_RULE@ python-config + else true; \ + fi; \ + done +- @if test -d $(LIBRARY); then :; else \ +- if test "$(PYTHONFRAMEWORKDIR)" = no-framework; then \ +- if test "$(SHLIB_SUFFIX)" = .dll; then \ +- $(INSTALL_DATA) $(LDLIBRARY) $(DESTDIR)$(LIBPL) ; \ +- else \ +- $(INSTALL_DATA) $(LIBRARY) $(DESTDIR)$(LIBPL)/$(LIBRARY) ; \ +- fi; \ +- else \ +- echo Skip install of $(LIBRARY) - use make frameworkinstall; \ +- fi; \ +- fi + $(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c + $(INSTALL_DATA) Programs/python.o $(DESTDIR)$(LIBPL)/python.o + $(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in diff --git a/00132-add-rpmbuild-hooks-to-unittest.patch b/00132-add-rpmbuild-hooks-to-unittest.patch new file mode 100644 index 0000000..77dc6ec --- /dev/null +++ b/00132-add-rpmbuild-hooks-to-unittest.patch @@ -0,0 +1,46 @@ +diff -up Python-3.2.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest Python-3.2.2/Lib/unittest/case.py +--- Python-3.2.2/Lib/unittest/case.py.add-rpmbuild-hooks-to-unittest 2011-09-03 12:16:44.000000000 -0400 ++++ Python-3.2.2/Lib/unittest/case.py 2011-09-09 06:35:16.365568382 -0400 +@@ -3,6 +3,7 @@ + import sys + import functools + import difflib ++import os + import logging + import pprint + import re +@@ -101,5 +102,21 @@ def expectedFailure(func): + raise self.test_case.failureException(msg) + ++# Non-standard/downstream-only hooks for handling issues with specific test ++# cases: ++ ++def _skipInRpmBuild(reason): ++ """ ++ Non-standard/downstream-only decorator for marking a specific unit test ++ to be skipped when run within the %check of an rpmbuild. ++ ++ Specifically, this takes effect when WITHIN_PYTHON_RPM_BUILD is set within ++ the environment, and has no effect otherwise. 
++ """ ++ if 'WITHIN_PYTHON_RPM_BUILD' in os.environ: ++ return skip(reason) ++ else: ++ return _id ++ + class _AssertRaisesBaseContext(_BaseTestCaseContext): + + def __init__(self, expected, test_case, expected_regex=None): +diff -up Python-3.2.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest Python-3.2.2/Lib/unittest/__init__.py +--- Python-3.2.2/Lib/unittest/__init__.py.add-rpmbuild-hooks-to-unittest 2011-09-03 12:16:44.000000000 -0400 ++++ Python-3.2.2/Lib/unittest/__init__.py 2011-09-09 06:35:16.366568382 -0400 +@@ -57,7 +57,8 @@ __unittest = True + + from .result import TestResult + from .case import (TestCase, FunctionTestCase, SkipTest, skip, skipIf, +- skipUnless, expectedFailure) ++ skipUnless, expectedFailure, ++ _skipInRpmBuild) + from .suite import BaseTestSuite, TestSuite + from .loader import (TestLoader, defaultTestLoader, makeSuite, getTestCaseNames, + findTestCases) diff --git a/00155-avoid-ctypes-thunks.patch b/00155-avoid-ctypes-thunks.patch new file mode 100644 index 0000000..f03890e --- /dev/null +++ b/00155-avoid-ctypes-thunks.patch @@ -0,0 +1,15 @@ +diff -up Python-3.2.3/Lib/ctypes/__init__.py.rhbz814391 Python-3.2.3/Lib/ctypes/__init__.py +--- Python-3.2.3/Lib/ctypes/__init__.py.rhbz814391 2012-04-20 15:12:49.017867692 -0400 ++++ Python-3.2.3/Lib/ctypes/__init__.py 2012-04-20 15:15:09.501111408 -0400 +@@ -275,11 +275,6 @@ def _reset_cache(): + # _SimpleCData.c_char_p_from_param + POINTER(c_char).from_param = c_char_p.from_param + _pointer_type_cache[None] = c_void_p +- # XXX for whatever reasons, creating the first instance of a callback +- # function is needed for the unittests on Win64 to succeed. This MAY +- # be a compiler bug, since the problem occurs only when _ctypes is +- # compiled with the MS SDK compiler. Or an uninitialized variable? +- CFUNCTYPE(c_int)(lambda: None) + + def create_unicode_buffer(init, size=None): + """create_unicode_buffer(aString) -> character array diff --git a/00160-disable-test_fs_holes-in-rpm-build.patch b/00160-disable-test_fs_holes-in-rpm-build.patch new file mode 100644 index 0000000..9fa91d5 --- /dev/null +++ b/00160-disable-test_fs_holes-in-rpm-build.patch @@ -0,0 +1,11 @@ +diff -up cpython-59223da36dec/Lib/test/test_posix.py.disable-test_fs_holes-in-rpm-build cpython-59223da36dec/Lib/test/test_posix.py +--- cpython-59223da36dec/Lib/test/test_posix.py.disable-test_fs_holes-in-rpm-build 2012-08-07 17:15:59.000000000 -0400 ++++ cpython-59223da36dec/Lib/test/test_posix.py 2012-08-07 17:16:53.528330330 -0400 +@@ -973,6 +973,7 @@ class PosixTester(unittest.TestCase): + posix.RTLD_GLOBAL + posix.RTLD_LOCAL + ++ @unittest._skipInRpmBuild('running kernel may not match kernel in chroot') + @unittest.skipUnless(hasattr(os, 'SEEK_HOLE'), + "test needs an OS that reports file holes") + def test_fs_holes(self): diff --git a/00163-disable-parts-of-test_socket-in-rpm-build.patch b/00163-disable-parts-of-test_socket-in-rpm-build.patch new file mode 100644 index 0000000..0e28036 --- /dev/null +++ b/00163-disable-parts-of-test_socket-in-rpm-build.patch @@ -0,0 +1,11 @@ +diff -up Python-3.3.0b1/Lib/test/test_socket.py.disable-test_socket-in-rpm-builds Python-3.3.0b1/Lib/test/test_socket.py +--- Python-3.3.0b1/Lib/test/test_socket.py.disable-test_socket-in-rpm-builds 2012-07-24 15:02:30.823355067 -0400 ++++ Python-3.3.0b1/Lib/test/test_socket.py 2012-07-24 15:08:13.021354999 -0400 +@@ -2188,6 +2188,7 @@ class RecvmsgGenericStreamTests(RecvmsgG + # Tests which require a stream socket and can use either recvmsg() + # or recvmsg_into(). 
+ ++ @unittest._skipInRpmBuild('fails intermittently when run within Koji') + def testRecvmsgEOF(self): + # Receive end-of-stream indicator (b"", peer socket closed). + msg, ancdata, flags, addr = self.doRecvmsg(self.serv_sock, 1024) diff --git a/00170-gc-assertions.patch b/00170-gc-assertions.patch new file mode 100644 index 0000000..1f71939 --- /dev/null +++ b/00170-gc-assertions.patch @@ -0,0 +1,310 @@ +diff --git a/Include/object.h b/Include/object.h +index cb57359..f928f97 100644 +--- a/Include/object.h ++++ b/Include/object.h +@@ -1069,6 +1069,49 @@ PyAPI_FUNC(void) + _PyObject_DebugTypeStats(FILE *out); + #endif /* ifndef Py_LIMITED_API */ + ++/* ++ Define a pair of assertion macros. ++ ++ These work like the regular C assert(), in that they will abort the ++ process with a message on stderr if the given condition fails to hold, ++ but compile away to nothing if NDEBUG is defined. ++ ++ However, before aborting, Python will also try to call _PyObject_Dump() on ++ the given object. This may be of use when investigating bugs in which a ++ particular object is corrupt (e.g. buggy a tp_visit method in an extension ++ module breaking the garbage collector), to help locate the broken objects. ++ ++ The WITH_MSG variant allows you to supply an additional message that Python ++ will attempt to print to stderr, after the object dump. ++*/ ++#ifdef NDEBUG ++/* No debugging: compile away the assertions: */ ++#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) ((void)0) ++#else ++/* With debugging: generate checks: */ ++#define PyObject_ASSERT_WITH_MSG(obj, expr, msg) \ ++ ((expr) \ ++ ? (void)(0) \ ++ : _PyObject_AssertFailed((obj), \ ++ (msg), \ ++ (__STRING(expr)), \ ++ (__FILE__), \ ++ (__LINE__), \ ++ (__PRETTY_FUNCTION__))) ++#endif ++ ++#define PyObject_ASSERT(obj, expr) \ ++ PyObject_ASSERT_WITH_MSG(obj, expr, NULL) ++ ++/* ++ Declare and define the entrypoint even when NDEBUG is defined, to avoid ++ causing compiler/linker errors when building extensions without NDEBUG ++ against a Python built with NDEBUG defined ++*/ ++PyAPI_FUNC(void) _PyObject_AssertFailed(PyObject *, const char *, ++ const char *, const char *, int, ++ const char *); ++ + #ifdef __cplusplus + } + #endif +diff --git a/Lib/test/test_gc.py b/Lib/test/test_gc.py +index 904fc7d..5676007 100644 +--- a/Lib/test/test_gc.py ++++ b/Lib/test/test_gc.py +@@ -1,10 +1,11 @@ + import unittest + from test.support import (verbose, refcount_test, run_unittest, + strip_python_stderr, cpython_only, start_threads, +- temp_dir, requires_type_collecting) ++ temp_dir, import_module, requires_type_collecting) + from test.support.script_helper import assert_python_ok, make_script + + import sys ++import sysconfig + import time + import gc + import weakref +@@ -46,6 +47,8 @@ class GC_Detector(object): + # gc collects it. + self.wr = weakref.ref(C1055820(666), it_happened) + ++BUILD_WITH_NDEBUG = ('-DNDEBUG' in sysconfig.get_config_vars()['PY_CFLAGS']) ++ + @with_tp_del + class Uncollectable(object): + """Create a reference cycle with multiple __del__ methods. 
+@@ -863,6 +866,50 @@ class GCCallbackTests(unittest.TestCase): + self.assertEqual(len(gc.garbage), 0) + + ++ @unittest.skipIf(BUILD_WITH_NDEBUG, ++ 'built with -NDEBUG') ++ def test_refcount_errors(self): ++ self.preclean() ++ # Verify the "handling" of objects with broken refcounts ++ import_module("ctypes") #skip if not supported ++ ++ import subprocess ++ code = '''if 1: ++ a = [] ++ b = [a] ++ ++ # Simulate the refcount of "a" being too low (compared to the ++ # references held on it by live data), but keeping it above zero ++ # (to avoid deallocating it): ++ import ctypes ++ ctypes.pythonapi.Py_DecRef(ctypes.py_object(a)) ++ ++ # The garbage collector should now have a fatal error when it reaches ++ # the broken object: ++ import gc ++ gc.collect() ++ ''' ++ p = subprocess.Popen([sys.executable, "-c", code], ++ stdout=subprocess.PIPE, ++ stderr=subprocess.PIPE) ++ stdout, stderr = p.communicate() ++ p.stdout.close() ++ p.stderr.close() ++ # Verify that stderr has a useful error message: ++ self.assertRegex(stderr, ++ b'Modules/gcmodule.c:[0-9]+: visit_decref: Assertion "\(\(gc\)->gc.gc_refs >> \(1\)\) != 0" failed.') ++ self.assertRegex(stderr, ++ b'refcount was too small') ++ self.assertRegex(stderr, ++ b'object : \[\]') ++ self.assertRegex(stderr, ++ b'type : list') ++ self.assertRegex(stderr, ++ b'refcount: 1') ++ self.assertRegex(stderr, ++ b'address : 0x[0-9a-f]+') ++ ++ + class GCTogglingTests(unittest.TestCase): + def setUp(self): + gc.enable() +diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c +index 6e26c7a..8410206 100644 +--- a/Modules/gcmodule.c ++++ b/Modules/gcmodule.c +@@ -238,7 +238,8 @@ update_refs(PyGC_Head *containers) + { + PyGC_Head *gc = containers->gc.gc_next; + for (; gc != containers; gc = gc->gc.gc_next) { +- assert(_PyGCHead_REFS(gc) == GC_REACHABLE); ++ PyObject_ASSERT(FROM_GC(gc), ++ _PyGCHead_REFS(gc) == GC_REACHABLE); + _PyGCHead_SET_REFS(gc, Py_REFCNT(FROM_GC(gc))); + /* Python's cyclic gc should never see an incoming refcount + * of 0: if something decref'ed to 0, it should have been +@@ -258,7 +259,8 @@ update_refs(PyGC_Head *containers) + * so serious that maybe this should be a release-build + * check instead of an assert? + */ +- assert(_PyGCHead_REFS(gc) != 0); ++ PyObject_ASSERT(FROM_GC(gc), ++ _PyGCHead_REFS(gc) != 0); + } + } + +@@ -273,7 +275,9 @@ visit_decref(PyObject *op, void *data) + * generation being collected, which can be recognized + * because only they have positive gc_refs. + */ +- assert(_PyGCHead_REFS(gc) != 0); /* else refcount was too small */ ++ PyObject_ASSERT_WITH_MSG(FROM_GC(gc), ++ _PyGCHead_REFS(gc) != 0, ++ "refcount was too small"); /* else refcount was too small */ + if (_PyGCHead_REFS(gc) > 0) + _PyGCHead_DECREF(gc); + } +@@ -333,9 +337,10 @@ visit_reachable(PyObject *op, PyGC_Head *reachable) + * If gc_refs == GC_UNTRACKED, it must be ignored. 
+ */ + else { +- assert(gc_refs > 0 +- || gc_refs == GC_REACHABLE +- || gc_refs == GC_UNTRACKED); ++ PyObject_ASSERT(FROM_GC(gc), ++ gc_refs > 0 ++ || gc_refs == GC_REACHABLE ++ || gc_refs == GC_UNTRACKED); + } + } + return 0; +@@ -377,7 +382,7 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable) + */ + PyObject *op = FROM_GC(gc); + traverseproc traverse = Py_TYPE(op)->tp_traverse; +- assert(_PyGCHead_REFS(gc) > 0); ++ PyObject_ASSERT(op, _PyGCHead_REFS(gc) > 0); + _PyGCHead_SET_REFS(gc, GC_REACHABLE); + (void) traverse(op, + (visitproc)visit_reachable, +@@ -440,7 +445,7 @@ move_legacy_finalizers(PyGC_Head *unreachable, PyGC_Head *finalizers) + for (gc = unreachable->gc.gc_next; gc != unreachable; gc = next) { + PyObject *op = FROM_GC(gc); + +- assert(IS_TENTATIVELY_UNREACHABLE(op)); ++ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op)); + next = gc->gc.gc_next; + + if (has_legacy_finalizer(op)) { +@@ -516,7 +521,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) + PyWeakReference **wrlist; + + op = FROM_GC(gc); +- assert(IS_TENTATIVELY_UNREACHABLE(op)); ++ PyObject_ASSERT(op, IS_TENTATIVELY_UNREACHABLE(op)); + next = gc->gc.gc_next; + + if (! PyType_SUPPORTS_WEAKREFS(Py_TYPE(op))) +@@ -537,9 +542,9 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) + * the callback pointer intact. Obscure: it also + * changes *wrlist. + */ +- assert(wr->wr_object == op); ++ PyObject_ASSERT(wr->wr_object, wr->wr_object == op); + _PyWeakref_ClearRef(wr); +- assert(wr->wr_object == Py_None); ++ PyObject_ASSERT(wr->wr_object, wr->wr_object == Py_None); + if (wr->wr_callback == NULL) + continue; /* no callback */ + +@@ -573,7 +578,7 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) + */ + if (IS_TENTATIVELY_UNREACHABLE(wr)) + continue; +- assert(IS_REACHABLE(wr)); ++ PyObject_ASSERT(op, IS_REACHABLE(wr)); + + /* Create a new reference so that wr can't go away + * before we can process it again. +@@ -582,7 +587,8 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) + + /* Move wr to wrcb_to_call, for the next pass. 
*/ + wrasgc = AS_GC(wr); +- assert(wrasgc != next); /* wrasgc is reachable, but ++ PyObject_ASSERT(op, wrasgc != next); ++ /* wrasgc is reachable, but + next isn't, so they can't + be the same */ + gc_list_move(wrasgc, &wrcb_to_call); +@@ -598,11 +604,11 @@ handle_weakrefs(PyGC_Head *unreachable, PyGC_Head *old) + + gc = wrcb_to_call.gc.gc_next; + op = FROM_GC(gc); +- assert(IS_REACHABLE(op)); +- assert(PyWeakref_Check(op)); ++ PyObject_ASSERT(op, IS_REACHABLE(op)); ++ PyObject_ASSERT(op, PyWeakref_Check(op)); + wr = (PyWeakReference *)op; + callback = wr->wr_callback; +- assert(callback != NULL); ++ PyObject_ASSERT(op, callback != NULL); + + /* copy-paste of weakrefobject.c's handle_callback() */ + temp = PyObject_CallFunctionObjArgs(callback, wr, NULL); +@@ -719,12 +725,14 @@ check_garbage(PyGC_Head *collectable) + for (gc = collectable->gc.gc_next; gc != collectable; + gc = gc->gc.gc_next) { + _PyGCHead_SET_REFS(gc, Py_REFCNT(FROM_GC(gc))); +- assert(_PyGCHead_REFS(gc) != 0); ++ PyObject_ASSERT(FROM_GC(gc), ++ _PyGCHead_REFS(gc) != 0); + } + subtract_refs(collectable); + for (gc = collectable->gc.gc_next; gc != collectable; + gc = gc->gc.gc_next) { +- assert(_PyGCHead_REFS(gc) >= 0); ++ PyObject_ASSERT(FROM_GC(gc), ++ _PyGCHead_REFS(gc) >= 0); + if (_PyGCHead_REFS(gc) != 0) + return -1; + } +diff --git a/Objects/object.c b/Objects/object.c +index ed8a62a..5279b15 100644 +--- a/Objects/object.c ++++ b/Objects/object.c +@@ -2116,6 +2116,35 @@ _PyTrash_thread_destroy_chain(void) + --tstate->trash_delete_nesting; + } + ++PyAPI_FUNC(void) ++_PyObject_AssertFailed(PyObject *obj, const char *msg, const char *expr, ++ const char *file, int line, const char *function) ++{ ++ fprintf(stderr, ++ "%s:%d: %s: Assertion \"%s\" failed.\n", ++ file, line, function, expr); ++ if (msg) { ++ fprintf(stderr, "%s\n", msg); ++ } ++ ++ fflush(stderr); ++ ++ if (obj) { ++ /* This might succeed or fail, but we're about to abort, so at least ++ try to provide any extra info we can: */ ++ _PyObject_Dump(obj); ++ } ++ else { ++ fprintf(stderr, "NULL object\n"); ++ } ++ ++ fflush(stdout); ++ fflush(stderr); ++ ++ /* Terminate the process: */ ++ abort(); ++} ++ + #ifndef Py_TRACE_REFS + /* For Py_LIMITED_API, we need an out-of-line version of _Py_Dealloc. + Define this here, so we can undefine the macro. 
*/ diff --git a/00178-dont-duplicate-flags-in-sysconfig.patch b/00178-dont-duplicate-flags-in-sysconfig.patch new file mode 100644 index 0000000..fc49b30 --- /dev/null +++ b/00178-dont-duplicate-flags-in-sysconfig.patch @@ -0,0 +1,30 @@ +diff -r 39b9b05c3085 Lib/distutils/sysconfig.py +--- a/Lib/distutils/sysconfig.py Wed Apr 10 00:27:23 2013 +0200 ++++ b/Lib/distutils/sysconfig.py Wed Apr 10 10:14:18 2013 +0200 +@@ -362,7 +362,10 @@ + done[n] = item = "" + if found: + after = value[m.end():] +- value = value[:m.start()] + item + after ++ value = value[:m.start()] ++ if item.strip() not in value: ++ value += item ++ value += after + if "$" in after: + notdone[name] = value + else: +diff -r 39b9b05c3085 Lib/sysconfig.py +--- a/Lib/sysconfig.py Wed Apr 10 00:27:23 2013 +0200 ++++ b/Lib/sysconfig.py Wed Apr 10 10:14:18 2013 +0200 +@@ -296,7 +296,10 @@ + + if found: + after = value[m.end():] +- value = value[:m.start()] + item + after ++ value = value[:m.start()] ++ if item.strip() not in value: ++ value += item ++ value += after + if "$" in after: + notdone[name] = value + else: diff --git a/00189-add-rewheel-module.patch b/00189-add-rewheel-module.patch new file mode 100644 index 0000000..bae64d4 --- /dev/null +++ b/00189-add-rewheel-module.patch @@ -0,0 +1,236 @@ +diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py +index d69e09f..5cb12df 100644 +--- a/Lib/ensurepip/__init__.py ++++ b/Lib/ensurepip/__init__.py +@@ -1,8 +1,10 @@ + import os + import os.path + import pkgutil ++import shutil + import sys + import tempfile ++from ensurepip import rewheel + + + __all__ = ["version", "bootstrap"] +@@ -25,6 +27,8 @@ def _run_pip(args, additional_paths=None): + + # Install the bundled software + import pip ++ if args[0] in ["install", "list", "wheel"]: ++ args.append('--pre') + return pip.main(args) + + +@@ -88,20 +92,39 @@ def _bootstrap(*, root=None, upgrade=False, user=False, + # omit pip and easy_install + os.environ["ENSUREPIP_OPTIONS"] = "install" + ++ whls = [] ++ rewheel_dir = None ++ # try to see if we have system-wide versions of _PROJECTS ++ dep_records = rewheel.find_system_records([p[0] for p in _PROJECTS]) ++ # TODO: check if system-wide versions are the newest ones ++ # if --upgrade is used? 
++ if all(dep_records): ++ # if we have all _PROJECTS installed system-wide, we'll recreate ++ # wheels from them and install those ++ rewheel_dir = tempfile.TemporaryDirectory() ++ for dr in dep_records: ++ new_whl = rewheel.rewheel_from_record(dr, rewheel_dir.name) ++ whls.append(os.path.join(rewheel_dir.name, new_whl)) ++ else: ++ # if we don't have all the _PROJECTS installed system-wide, ++ # let's just fall back to bundled wheels ++ for project, version in _PROJECTS: ++ whl = os.path.join( ++ os.path.dirname(__file__), ++ "_bundled", ++ "{}-{}-py2.py3-none-any.whl".format(project, version) ++ ) ++ whls.append(whl) ++ + with tempfile.TemporaryDirectory() as tmpdir: + # Put our bundled wheels into a temporary directory and construct the + # additional paths that need added to sys.path + additional_paths = [] +- for project, version in _PROJECTS: +- wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version) +- whl = pkgutil.get_data( +- "ensurepip", +- "_bundled/{}".format(wheel_name), +- ) +- with open(os.path.join(tmpdir, wheel_name), "wb") as fp: +- fp.write(whl) +- +- additional_paths.append(os.path.join(tmpdir, wheel_name)) ++ for whl in whls: ++ shutil.copy(whl, tmpdir) ++ additional_paths.append(os.path.join(tmpdir, os.path.basename(whl))) ++ if rewheel_dir: ++ rewheel_dir.cleanup() + + # Construct the arguments to be passed to the pip command + args = ["install", "--no-index", "--find-links", tmpdir] +diff --git a/Lib/ensurepip/rewheel/__init__.py b/Lib/ensurepip/rewheel/__init__.py +new file mode 100644 +index 0000000..753c764 +--- /dev/null ++++ b/Lib/ensurepip/rewheel/__init__.py +@@ -0,0 +1,143 @@ ++import argparse ++import codecs ++import csv ++import email.parser ++import os ++import io ++import re ++import site ++import subprocess ++import sys ++import zipfile ++ ++def run(): ++ parser = argparse.ArgumentParser(description='Recreate wheel of package with given RECORD.') ++ parser.add_argument('record_path', ++ help='Path to RECORD file') ++ parser.add_argument('-o', '--output-dir', ++ help='Dir where to place the wheel, defaults to current working dir.', ++ dest='outdir', ++ default=os.path.curdir) ++ ++ ns = parser.parse_args() ++ retcode = 0 ++ try: ++ print(rewheel_from_record(**vars(ns))) ++ except BaseException as e: ++ print('Failed: {}'.format(e)) ++ retcode = 1 ++ sys.exit(1) ++ ++def find_system_records(projects): ++ """Return list of paths to RECORD files for system-installed projects. ++ ++ If a project is not installed, the resulting list contains None instead ++ of a path to its RECORD ++ """ ++ records = [] ++ # get system site-packages dirs ++ sys_sitepack = site.getsitepackages([sys.base_prefix, sys.base_exec_prefix]) ++ sys_sitepack = [sp for sp in sys_sitepack if os.path.exists(sp)] ++ # try to find all projects in all system site-packages ++ for project in projects: ++ path = None ++ for sp in sys_sitepack: ++ dist_info_re = os.path.join(sp, project) + r'-[^\{0}]+\.dist-info'.format(os.sep) ++ candidates = [os.path.join(sp, p) for p in os.listdir(sp)] ++ # filter out candidate dirs based on the above regexp ++ filtered = [c for c in candidates if re.match(dist_info_re, c)] ++ # if we have 0 or 2 or more dirs, something is wrong... 
++ if len(filtered) == 1: ++ path = filtered[0] ++ if path is not None: ++ records.append(os.path.join(path, 'RECORD')) ++ else: ++ records.append(None) ++ return records ++ ++def rewheel_from_record(record_path, outdir): ++ """Recreates a whee of package with given record_path and returns path ++ to the newly created wheel.""" ++ site_dir = os.path.dirname(os.path.dirname(record_path)) ++ record_relpath = record_path[len(site_dir):].strip(os.path.sep) ++ to_write, to_omit = get_records_to_pack(site_dir, record_relpath) ++ new_wheel_name = get_wheel_name(record_path) ++ new_wheel_path = os.path.join(outdir, new_wheel_name + '.whl') ++ ++ new_wheel = zipfile.ZipFile(new_wheel_path, mode='w', compression=zipfile.ZIP_DEFLATED) ++ # we need to write a new record with just the files that we will write, ++ # e.g. not binaries and *.pyc/*.pyo files ++ new_record = io.StringIO() ++ writer = csv.writer(new_record) ++ ++ # handle files that we can write straight away ++ for f, sha_hash, size in to_write: ++ new_wheel.write(os.path.join(site_dir, f), arcname=f) ++ writer.writerow([f, sha_hash,size]) ++ ++ # rewrite the old wheel file with a new computed one ++ writer.writerow([record_relpath, '', '']) ++ new_wheel.writestr(record_relpath, new_record.getvalue()) ++ ++ new_wheel.close() ++ ++ return new_wheel.filename ++ ++def get_wheel_name(record_path): ++ """Return proper name of the wheel, without .whl.""" ++ ++ wheel_info_path = os.path.join(os.path.dirname(record_path), 'WHEEL') ++ with codecs.open(wheel_info_path, encoding='utf-8') as wheel_info_file: ++ wheel_info = email.parser.Parser().parsestr(wheel_info_file.read()) ++ ++ metadata_path = os.path.join(os.path.dirname(record_path), 'METADATA') ++ with codecs.open(metadata_path, encoding='utf-8') as metadata_file: ++ metadata = email.parser.Parser().parsestr(metadata_file.read()) ++ ++ # construct name parts according to wheel spec ++ distribution = metadata.get('Name') ++ version = metadata.get('Version') ++ build_tag = '' # nothing for now ++ lang_tag = [] ++ for t in wheel_info.get_all('Tag'): ++ lang_tag.append(t.split('-')[0]) ++ lang_tag = '.'.join(lang_tag) ++ abi_tag, plat_tag = wheel_info.get('Tag').split('-')[1:3] ++ # leave out build tag, if it is empty ++ to_join = filter(None, [distribution, version, build_tag, lang_tag, abi_tag, plat_tag]) ++ return '-'.join(list(to_join)) ++ ++def get_records_to_pack(site_dir, record_relpath): ++ """Accepts path of sitedir and path of RECORD file relative to it. ++ Returns two lists: ++ - list of files that can be written to new RECORD straight away ++ - list of files that shouldn't be written or need some processing ++ (pyc and pyo files, scripts) ++ """ ++ record_file_path = os.path.join(site_dir, record_relpath) ++ with codecs.open(record_file_path, encoding='utf-8') as record_file: ++ record_contents = record_file.read() ++ # temporary fix for https://github.com/pypa/pip/issues/1376 ++ # we need to ignore files under ".data" directory ++ data_dir = os.path.dirname(record_relpath).strip(os.path.sep) ++ data_dir = data_dir[:-len('dist-info')] + 'data' ++ ++ to_write = [] ++ to_omit = [] ++ for l in record_contents.splitlines(): ++ spl = l.split(',') ++ if len(spl) == 3: ++ # new record will omit (or write differently): ++ # - abs paths, paths with ".." (entry points), ++ # - pyc+pyo files ++ # - the old RECORD file ++ # TODO: is there any better way to recognize an entry point? 
++ if os.path.isabs(spl[0]) or spl[0].startswith('..') or \ ++ spl[0].endswith('.pyc') or spl[0].endswith('.pyo') or \ ++ spl[0] == record_relpath or spl[0].startswith(data_dir): ++ to_omit.append(spl) ++ else: ++ to_write.append(spl) ++ else: ++ pass # bad RECORD or empty line ++ return to_write, to_omit +diff --git a/Makefile.pre.in b/Makefile.pre.in +index 51e8132..42b3d29 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -1241,7 +1241,7 @@ LIBSUBDIRS= tkinter tkinter/test tkinter/test/test_tkinter \ + test/test_asyncio \ + collections concurrent concurrent/futures encodings \ + email email/mime test/test_email test/test_email/data \ +- ensurepip ensurepip/_bundled \ ++ ensurepip ensurepip/_bundled ensurepip/rewheel \ + html json test/test_json http dbm xmlrpc \ + sqlite3 sqlite3/test \ + logging csv wsgiref urllib \ diff --git a/00205-make-libpl-respect-lib64.patch b/00205-make-libpl-respect-lib64.patch new file mode 100644 index 0000000..3e7c797 --- /dev/null +++ b/00205-make-libpl-respect-lib64.patch @@ -0,0 +1,12 @@ +diff -up Python-3.5.0/Makefile.pre.in.lib Python-3.5.0/Makefile.pre.in +--- Python-3.5.0/Makefile.pre.in.lib 2015-09-21 15:39:47.928286620 +0200 ++++ Python-3.5.0/Makefile.pre.in 2015-09-21 15:42:58.004042762 +0200 +@@ -1340,7 +1340,7 @@ inclinstall: + + # Install the library and miscellaneous stuff needed for extending/embedding + # This goes into $(exec_prefix) +-LIBPL= @LIBPL@ ++LIBPL= $(LIBDEST)/config-$(LDVERSION)-$(MULTIARCH) + + # pkgconfig directory + LIBPC= $(LIBDIR)/pkgconfig diff --git a/00251-change-user-install-location.patch b/00251-change-user-install-location.patch new file mode 100644 index 0000000..1dcd9f2 --- /dev/null +++ b/00251-change-user-install-location.patch @@ -0,0 +1,46 @@ +diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py +index 9d31d13..ed44a93 100644 +--- a/Lib/distutils/command/install.py ++++ b/Lib/distutils/command/install.py +@@ -424,8 +424,18 @@ class install(Command): + raise DistutilsOptionError( + "must not supply exec-prefix without prefix") + +- self.prefix = os.path.normpath(sys.prefix) +- self.exec_prefix = os.path.normpath(sys.exec_prefix) ++ # self.prefix is set to sys.prefix + /local/ ++ # if the executable is /usr/bin/python* and RPM build ++ # is not detected to make pip and distutils install into ++ # the separate location. ++ if (sys.executable.startswith("/usr/bin/python") ++ and 'RPM_BUILD_ROOT' not in os.environ): ++ addition = "/local" ++ else: ++ addition = "" ++ ++ self.prefix = os.path.normpath(sys.prefix) + addition ++ self.exec_prefix = os.path.normpath(sys.exec_prefix) + addition + + else: + if self.exec_prefix is None: +diff --git a/Lib/site.py b/Lib/site.py +index 4744eb0..b5fe571 100644 +--- a/Lib/site.py ++++ b/Lib/site.py +@@ -326,7 +326,15 @@ def getsitepackages(prefixes=None): + return sitepackages + + def addsitepackages(known_paths, prefixes=None): +- """Add site-packages to sys.path""" ++ """Add site-packages to sys.path. ++ ++ '/usr/local' is included in PREFIXES if the executable is /usr/bin/python* ++ and RPM build is not detected to make sudo pip installed packages visible. 
++ ++ """ ++ if (ENABLE_USER_SITE and sys.executable.startswith("/usr/bin/python") ++ and 'RPM_BUILD_ROOT' not in os.environ): ++ PREFIXES.insert(0, "/usr/local") + for sitedir in getsitepackages(prefixes): + if os.path.isdir(sitedir): + addsitedir(sitedir, known_paths) diff --git a/00264-skip-test-failing-on-aarch64.patch b/00264-skip-test-failing-on-aarch64.patch new file mode 100644 index 0000000..edda219 --- /dev/null +++ b/00264-skip-test-failing-on-aarch64.patch @@ -0,0 +1,12 @@ +diff --git a/Lib/ctypes/test/test_structures.py b/Lib/ctypes/test/test_structures.py +index 3eded77..ad7859a 100644 +--- a/Lib/ctypes/test/test_structures.py ++++ b/Lib/ctypes/test/test_structures.py +@@ -392,6 +392,7 @@ class StructureTestCase(unittest.TestCase): + (1, 0, 0, 0, 0, 0)) + self.assertRaises(TypeError, lambda: Z(1, 2, 3, 4, 5, 6, 7)) + ++ @unittest.skip('Fails on aarch64: http://bugs.python.org/issue29804') + def test_pass_by_value(self): + # This should mirror the structure in Modules/_ctypes/_ctypes_test.c + class X(Structure): diff --git a/00270-fix-ssl-alpn-hook-test.patch b/00270-fix-ssl-alpn-hook-test.patch new file mode 100644 index 0000000..97b433e --- /dev/null +++ b/00270-fix-ssl-alpn-hook-test.patch @@ -0,0 +1,16 @@ +diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py +index d203cdd..c128dae 100644 +--- a/Lib/test/test_ssl.py ++++ b/Lib/test/test_ssl.py +@@ -3256,8 +3256,9 @@ if _have_threads: + except ssl.SSLError as e: + stats = e + +- if expected is None and IS_OPENSSL_1_1: +- # OpenSSL 1.1.0 raises handshake error ++ if (expected is None and IS_OPENSSL_1_1 ++ and ssl.OPENSSL_VERSION_INFO < (1, 1, 0, 6)): ++ # OpenSSL 1.1.0 to 1.1.0e raises handshake error + self.assertIsInstance(stats, ssl.SSLError) + else: + msg = "failed trying %s (s) and %s (c).\n" \ diff --git a/00271-asyncio-get-default-signal-handler.patch b/00271-asyncio-get-default-signal-handler.patch new file mode 100644 index 0000000..8b1bf77 --- /dev/null +++ b/00271-asyncio-get-default-signal-handler.patch @@ -0,0 +1,99 @@ +diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py +index 492a84a2313..9746678607c 100644 +--- a/Lib/test/test_asyncio/test_events.py ++++ b/Lib/test/test_asyncio/test_events.py +@@ -1980,19 +1980,26 @@ def test_subprocess_terminate(self): + + @unittest.skipIf(sys.platform == 'win32', "Don't have SIGHUP") + def test_subprocess_send_signal(self): +- prog = os.path.join(os.path.dirname(__file__), 'echo.py') +- +- connect = self.loop.subprocess_exec( +- functools.partial(MySubprocessProtocol, self.loop), +- sys.executable, prog) +- transp, proto = self.loop.run_until_complete(connect) +- self.assertIsInstance(proto, MySubprocessProtocol) +- self.loop.run_until_complete(proto.connected) +- +- transp.send_signal(signal.SIGHUP) +- self.loop.run_until_complete(proto.completed) +- self.assertEqual(-signal.SIGHUP, proto.returncode) +- transp.close() ++ # bpo-31034: Make sure that we get the default signal handler (killing ++ # the process). The parent process may have decided to ignore SIGHUP, ++ # and signal handlers are inherited. 
++ old_handler = signal.signal(signal.SIGHUP, signal.SIG_DFL) ++ try: ++ prog = os.path.join(os.path.dirname(__file__), 'echo.py') ++ ++ connect = self.loop.subprocess_exec( ++ functools.partial(MySubprocessProtocol, self.loop), ++ sys.executable, prog) ++ transp, proto = self.loop.run_until_complete(connect) ++ self.assertIsInstance(proto, MySubprocessProtocol) ++ self.loop.run_until_complete(proto.connected) ++ ++ transp.send_signal(signal.SIGHUP) ++ self.loop.run_until_complete(proto.completed) ++ self.assertEqual(-signal.SIGHUP, proto.returncode) ++ transp.close() ++ finally: ++ signal.signal(signal.SIGHUP, old_handler) + + def test_subprocess_stderr(self): + prog = os.path.join(os.path.dirname(__file__), 'echo2.py') +diff --git a/Lib/test/test_asyncio/test_subprocess.py b/Lib/test/test_asyncio/test_subprocess.py +index 2e14a8a9735..e8822c36698 100644 +--- a/Lib/test/test_asyncio/test_subprocess.py ++++ b/Lib/test/test_asyncio/test_subprocess.py +@@ -166,25 +166,32 @@ def test_terminate(self): + + @unittest.skipIf(sys.platform == 'win32', "Don't have SIGHUP") + def test_send_signal(self): +- code = 'import time; print("sleeping", flush=True); time.sleep(3600)' +- args = [sys.executable, '-c', code] +- create = asyncio.create_subprocess_exec(*args, +- stdout=subprocess.PIPE, +- loop=self.loop) +- proc = self.loop.run_until_complete(create) +- +- @asyncio.coroutine +- def send_signal(proc): +- # basic synchronization to wait until the program is sleeping +- line = yield from proc.stdout.readline() +- self.assertEqual(line, b'sleeping\n') ++ # bpo-31034: Make sure that we get the default signal handler (killing ++ # the process). The parent process may have decided to ignore SIGHUP, ++ # and signal handlers are inherited. ++ old_handler = signal.signal(signal.SIGHUP, signal.SIG_DFL) ++ try: ++ code = 'import time; print("sleeping", flush=True); time.sleep(3600)' ++ args = [sys.executable, '-c', code] ++ create = asyncio.create_subprocess_exec(*args, ++ stdout=subprocess.PIPE, ++ loop=self.loop) ++ proc = self.loop.run_until_complete(create) + +- proc.send_signal(signal.SIGHUP) +- returncode = (yield from proc.wait()) +- return returncode +- +- returncode = self.loop.run_until_complete(send_signal(proc)) +- self.assertEqual(-signal.SIGHUP, returncode) ++ @asyncio.coroutine ++ def send_signal(proc): ++ # basic synchronization to wait until the program is sleeping ++ line = yield from proc.stdout.readline() ++ self.assertEqual(line, b'sleeping\n') ++ ++ proc.send_signal(signal.SIGHUP) ++ returncode = (yield from proc.wait()) ++ return returncode ++ ++ returncode = self.loop.run_until_complete(send_signal(proc)) ++ self.assertEqual(-signal.SIGHUP, returncode) ++ finally: ++ signal.signal(signal.SIGHUP, old_handler) + + def prepare_broken_pipe_test(self): + # buffer large enough to feed the whole pipe buffer diff --git a/00272-fix-ftplib-to-reject-newlines.patch b/00272-fix-ftplib-to-reject-newlines.patch new file mode 100644 index 0000000..66486a8 --- /dev/null +++ b/00272-fix-ftplib-to-reject-newlines.patch @@ -0,0 +1,58 @@ +From 8c2d4cf092c5f0335e7982392a33927579c4d512 Mon Sep 17 00:00:00 2001 +From: Dong-hee Na +Date: Wed, 26 Jul 2017 21:11:25 +0900 +Subject: [PATCH] [3.6] bpo-30119: fix ftplib.FTP.putline() to throw an error + for a illegal command (#1214) (#2886) + +--- + Lib/ftplib.py | 2 ++ + Lib/test/test_ftplib.py | 6 +++++- + Misc/NEWS.d/next/Library/2017-07-26-15-15-00.bpo-30119.DZ6C_S.rst | 2 ++ + 3 files changed, 9 insertions(+), 1 deletion(-) + create mode 100644 
Misc/NEWS.d/next/Library/2017-07-26-15-15-00.bpo-30119.DZ6C_S.rst + +diff --git a/Lib/ftplib.py b/Lib/ftplib.py +index 8f36f537e8a..a02e595cb02 100644 +--- a/Lib/ftplib.py ++++ b/Lib/ftplib.py +@@ -186,6 +186,8 @@ def sanitize(self, s): + + # Internal: send one line to the server, appending CRLF + def putline(self, line): ++ if '\r' in line or '\n' in line: ++ raise ValueError('an illegal newline character should not be contained') + line = line + CRLF + if self.debugging > 1: + print('*put*', self.sanitize(line)) +diff --git a/Lib/test/test_ftplib.py b/Lib/test/test_ftplib.py +index 12fabc5e8be..a561e9efa03 100644 +--- a/Lib/test/test_ftplib.py ++++ b/Lib/test/test_ftplib.py +@@ -484,6 +484,9 @@ def test_sanitize(self): + self.assertEqual(self.client.sanitize('PASS 12345'), repr('PASS *****')) + + def test_exceptions(self): ++ self.assertRaises(ValueError, self.client.sendcmd, 'echo 40\r\n0') ++ self.assertRaises(ValueError, self.client.sendcmd, 'echo 40\n0') ++ self.assertRaises(ValueError, self.client.sendcmd, 'echo 40\r0') + self.assertRaises(ftplib.error_temp, self.client.sendcmd, 'echo 400') + self.assertRaises(ftplib.error_temp, self.client.sendcmd, 'echo 499') + self.assertRaises(ftplib.error_perm, self.client.sendcmd, 'echo 500') +@@ -492,7 +495,8 @@ def test_exceptions(self): + + def test_all_errors(self): + exceptions = (ftplib.error_reply, ftplib.error_temp, ftplib.error_perm, +- ftplib.error_proto, ftplib.Error, OSError, EOFError) ++ ftplib.error_proto, ftplib.Error, OSError, ++ EOFError) + for x in exceptions: + try: + raise x('exception not included in all_errors set') +diff --git a/Misc/NEWS.d/next/Library/2017-07-26-15-15-00.bpo-30119.DZ6C_S.rst b/Misc/NEWS.d/next/Library/2017-07-26-15-15-00.bpo-30119.DZ6C_S.rst +new file mode 100644 +index 00000000000..a37d3703842 +--- /dev/null ++++ b/Misc/NEWS.d/next/Library/2017-07-26-15-15-00.bpo-30119.DZ6C_S.rst +@@ -0,0 +1,2 @@ ++ftplib.FTP.putline() now throws ValueError on commands that contains CR or ++LF. Patch by Dong-hee Na. diff --git a/00273-skip-float-test.patch b/00273-skip-float-test.patch new file mode 100644 index 0000000..233d3ed --- /dev/null +++ b/00273-skip-float-test.patch @@ -0,0 +1,12 @@ +diff --git a/Lib/test/test_float.py b/Lib/test/test_float.py +index 66726d6496d..3318fa5df59 100644 +--- a/Lib/test/test_float.py ++++ b/Lib/test/test_float.py +@@ -141,6 +141,7 @@ class GeneralFloatCases(unittest.TestCase): + # non-UTF-8 byte string + check(b'123\xa0') + ++ @unittest.skip('Fails in Koji: https://bugzilla.redhat.com/show_bug.cgi?id=1484497') + @support.run_with_locale('LC_NUMERIC', 'fr_FR', 'de_DE') + def test_float_with_comma(self): + # set locale to something that doesn't use '.' for the decimal point diff --git a/00274-fix-arch-names.patch b/00274-fix-arch-names.patch new file mode 100644 index 0000000..9d69223 --- /dev/null +++ b/00274-fix-arch-names.patch @@ -0,0 +1,58 @@ +diff -up Python-3.5.0/configure.ac.than Python-3.5.0/configure.ac +--- Python-3.5.0/configure.ac.than 2015-11-13 11:51:32.039560172 -0500 ++++ Python-3.5.0/configure.ac 2015-11-13 11:52:11.670168157 -0500 +@@ -788,9 +788,9 @@ cat >> conftest.c <> conftest.c <> conftest.c <> conftest.c < + + + + idle3.desktop + IDLE3 + CC0 + Python-2.0 + Python 3 Integrated Development and Learning Environment + +

+    <p>
+      IDLE is Python’s Integrated Development and Learning Environment.
+      The GUI is uniform between Windows, Unix, and Mac OS X.
+      IDLE provides an easy way to start writing, running, and debugging
+      Python code.
+    </p>
+    <p>
+      IDLE is written in pure Python, and uses the tkinter GUI toolkit.
+      It provides:
+    </p>
+    <ul>
+      <li>a Python shell window (interactive interpreter) with colorizing of code input, output, and error messages,</li>
+      <li>a multi-window text editor with multiple undo, Python colorizing, smart indent, call tips, auto completion, and other features,</li>
+      <li>search within any window, replace within editor windows, and search through multiple files (grep),</li>
+      <li>a debugger with persistent breakpoints, stepping, and viewing of global and local namespaces.</li>
+    </ul>
+  </description>
+  <url>https://docs.python.org/3/library/idle.html</url>
+  <screenshots>
+    <screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-main-window.png</screenshot>
+    <screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-class-browser.png</screenshot>
+    <screenshot>http://in.waw.pl/~zbyszek/fedora/idle3-appdata/idle3-code-viewer.png</screenshot>
+  </screenshots>
+  <update_contact>zbyszek@in.waw.pl</update_contact>
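A quick local sanity check for the AppStream metadata above and the desktop entry that follows; this assumes the appstream-glib and desktop-file-utils packages, which ship these validators, are installed:

    appstream-util validate-relax idle3.appdata.xml   # relaxed validation of the appdata/AppStream metadata
    desktop-file-validate idle3.desktop               # checks the entry against the Desktop Entry specification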
diff --git a/idle3.desktop b/idle3.desktop new file mode 100644 index 0000000..dc1d3c3 --- /dev/null +++ b/idle3.desktop @@ -0,0 +1,11 @@ +[Desktop Entry] +Version=1.0 +Name=IDLE 3 +Comment=Python 3 Integrated Development and Learning Environment +Exec=idle3 %F +TryExec=idle3 +Terminal=false +Type=Application +Icon=idle3 +Categories=Development;IDE; +MimeType=text/x-python; \ No newline at end of file diff --git a/libpython.stp b/libpython.stp new file mode 100644 index 0000000..f41da80 --- /dev/null +++ b/libpython.stp @@ -0,0 +1,17 @@ +/* Systemtap tapset to make it easier to trace Python */ + +/* + Define python.function.entry/return: +*/ +probe python.function.entry = process("python3").library("LIBRARY_PATH").mark("function__entry") +{ + filename = user_string($arg1); + funcname = user_string($arg2); + lineno = $arg3; +} +probe python.function.return = process("python3").library("LIBRARY_PATH").mark("function__return") +{ + filename = user_string($arg1); + funcname = user_string($arg2); + lineno = $arg3; +} diff --git a/macros.pybytecompile3.7 b/macros.pybytecompile3.7 new file mode 100644 index 0000000..3968c6e --- /dev/null +++ b/macros.pybytecompile3.7 @@ -0,0 +1,10 @@ +# Note that the path could itself be a python file, or a directory + +# Python's compile_all module only works on directories, and requires a max +# recursion depth + +%py_byte_compile()\ +python_binary="%1"\ +bytecode_compilation_path="%2"\ +find $bytecode_compilation_path -type f -a -name "*.py" -print0 | xargs -0 $python_binary -O -c 'import py_compile, sys; [py_compile.compile(f, dfile=f.partition("$RPM_BUILD_ROOT")[2], optimize=opt) for opt in range(2) for f in sys.argv[1:]]' || :\ +%{nil} diff --git a/macros.systempython b/macros.systempython new file mode 100644 index 0000000..314cd45 --- /dev/null +++ b/macros.systempython @@ -0,0 +1 @@ +%system_python_abi %{expand: } diff --git a/pyfuntop.stp b/pyfuntop.stp new file mode 100644 index 0000000..f235a23 --- /dev/null +++ b/pyfuntop.stp @@ -0,0 +1,21 @@ +#!/usr/bin/stap + +global fn_calls; + +probe python.function.entry +{ + fn_calls[pid(), filename, funcname, lineno] += 1; +} + +probe timer.ms(1000) { + printf("\033[2J\033[1;1H") /* clear screen */ + printf("%6s %80s %6s %30s %6s\n", + "PID", "FILENAME", "LINE", "FUNCTION", "CALLS") + foreach ([pid, filename, funcname, lineno] in fn_calls- limit 20) { + printf("%6d %80s %6d %30s %6d\n", + pid, filename, lineno, funcname, + fn_calls[pid, filename, funcname, lineno]); + } + + delete fn_calls; +} diff --git a/python-gdb.py b/python-gdb.py new file mode 100644 index 0000000..dd6d462 --- /dev/null +++ b/python-gdb.py @@ -0,0 +1,1380 @@ +#!/usr/bin/python +''' +From gdb 7 onwards, gdb's build can be configured --with-python, allowing gdb +to be extended with Python code e.g. for library-specific data visualizations, +such as for the C++ STL types. Documentation on this API can be seen at: +http://sourceware.org/gdb/current/onlinedocs/gdb/Python-API.html + + +This python module deals with the case when the process being debugged (the +"inferior process" in gdb parlance) is itself python, or more specifically, +linked against libpython. In this situation, almost every item of data is a +(PyObject*), and having the debugger merely print their addresses is not very +enlightening. + +This module embeds knowledge about the implementation details of libpython so +that we can emit useful visualizations e.g. 
a string, a list, a dict, a frame +giving file/line information and the state of local variables + +In particular, given a gdb.Value corresponding to a PyObject* in the inferior +process, we can generate a "proxy value" within the gdb process. For example, +given a PyObject* in the inferior process that is in fact a PyListObject* +holding three PyObject* that turn out to be PyBytesObject* instances, we can +generate a proxy value within the gdb process that is a list of bytes +instances: + [b"foo", b"bar", b"baz"] + +Doing so can be expensive for complicated graphs of objects, and could take +some time, so we also have a "write_repr" method that writes a representation +of the data to a file-like object. This allows us to stop the traversal by +having the file-like object raise an exception if it gets too much data. + +With both "proxyval" and "write_repr" we keep track of the set of all addresses +visited so far in the traversal, to avoid infinite recursion due to cycles in +the graph of object references. + +We try to defer gdb.lookup_type() invocations for python types until as late as +possible: for a dynamically linked python binary, when the process starts in +the debugger, the libpython.so hasn't been dynamically loaded yet, so none of +the type names are known to the debugger + +The module also extends gdb with some python-specific commands. +''' +from __future__ import with_statement +import gdb + +# Look up the gdb.Type for some standard types: +_type_char_ptr = gdb.lookup_type('char').pointer() # char* +_type_unsigned_char_ptr = gdb.lookup_type('unsigned char').pointer() # unsigned char* +_type_void_ptr = gdb.lookup_type('void').pointer() # void* +_type_size_t = gdb.lookup_type('size_t') + +SIZEOF_VOID_P = _type_void_ptr.sizeof + + +Py_TPFLAGS_HEAPTYPE = (1L << 9) + +Py_TPFLAGS_INT_SUBCLASS = (1L << 23) +Py_TPFLAGS_LONG_SUBCLASS = (1L << 24) +Py_TPFLAGS_LIST_SUBCLASS = (1L << 25) +Py_TPFLAGS_TUPLE_SUBCLASS = (1L << 26) +Py_TPFLAGS_BYTES_SUBCLASS = (1L << 27) +Py_TPFLAGS_UNICODE_SUBCLASS = (1L << 28) +Py_TPFLAGS_DICT_SUBCLASS = (1L << 29) +Py_TPFLAGS_BASE_EXC_SUBCLASS = (1L << 30) +Py_TPFLAGS_TYPE_SUBCLASS = (1L << 31) + + +MAX_OUTPUT_LEN=1024 + +class NullPyObjectPtr(RuntimeError): + pass + + +def safety_limit(val): + # Given a integer value from the process being debugged, limit it to some + # safety threshold so that arbitrary breakage within said process doesn't + # break the gdb process too much (e.g. sizes of iterations, sizes of lists) + return min(val, 1000) + + +def safe_range(val): + # As per range, but don't trust the value too much: cap it to a safety + # threshold in case the data was corrupted + return xrange(safety_limit(val)) + + +class StringTruncated(RuntimeError): + pass + +class TruncatedStringIO(object): + '''Similar to cStringIO, but can truncate the output by raising a + StringTruncated exception''' + def __init__(self, maxlen=None): + self._val = '' + self.maxlen = maxlen + + def write(self, data): + if self.maxlen: + if len(data) + len(self._val) > self.maxlen: + # Truncation: + self._val += data[0:self.maxlen - len(self._val)] + raise StringTruncated() + + self._val += data + + def getvalue(self): + return self._val + +class PyObjectPtr(object): + """ + Class wrapping a gdb.Value that's a either a (PyObject*) within the + inferior process, or some subclass pointer e.g. (PyBytesObject*) + + There will be a subclass for every refined PyObject type that we care + about. 
+ + Note that at every stage the underlying pointer could be NULL, point + to corrupt data, etc; this is the debugger, after all. + """ + _typename = 'PyObject' + + def __init__(self, gdbval, cast_to=None): + if cast_to: + self._gdbval = gdbval.cast(cast_to) + else: + self._gdbval = gdbval + + def field(self, name): + ''' + Get the gdb.Value for the given field within the PyObject, coping with + some python 2 versus python 3 differences. + + Various libpython types are defined using the "PyObject_HEAD" and + "PyObject_VAR_HEAD" macros. + + In Python 2, this these are defined so that "ob_type" and (for a var + object) "ob_size" are fields of the type in question. + + In Python 3, this is defined as an embedded PyVarObject type thus: + PyVarObject ob_base; + so that the "ob_size" field is located insize the "ob_base" field, and + the "ob_type" is most easily accessed by casting back to a (PyObject*). + ''' + if self.is_null(): + raise NullPyObjectPtr(self) + + if name == 'ob_type': + pyo_ptr = self._gdbval.cast(PyObjectPtr.get_gdb_type()) + return pyo_ptr.dereference()[name] + + if name == 'ob_size': + pyo_ptr = self._gdbval.cast(PyVarObjectPtr.get_gdb_type()) + return pyo_ptr.dereference()[name] + + # General case: look it up inside the object: + return self._gdbval.dereference()[name] + + def pyop_field(self, name): + ''' + Get a PyObjectPtr for the given PyObject* field within this PyObject, + coping with some python 2 versus python 3 differences. + ''' + return PyObjectPtr.from_pyobject_ptr(self.field(name)) + + def write_field_repr(self, name, out, visited): + ''' + Extract the PyObject* field named "name", and write its representation + to file-like object "out" + ''' + field_obj = self.pyop_field(name) + field_obj.write_repr(out, visited) + + def get_truncated_repr(self, maxlen): + ''' + Get a repr-like string for the data, but truncate it at "maxlen" bytes + (ending the object graph traversal as soon as you do) + ''' + out = TruncatedStringIO(maxlen) + try: + self.write_repr(out, set()) + except StringTruncated: + # Truncation occurred: + return out.getvalue() + '...(truncated)' + + # No truncation occurred: + return out.getvalue() + + def type(self): + return PyTypeObjectPtr(self.field('ob_type')) + + def is_null(self): + return 0 == long(self._gdbval) + + def is_optimized_out(self): + ''' + Is the value of the underlying PyObject* visible to the debugger? + + This can vary with the precise version of the compiler used to build + Python, and the precise version of gdb. + + See e.g. https://bugzilla.redhat.com/show_bug.cgi?id=556975 with + PyEval_EvalFrameEx's "f" + ''' + return self._gdbval.is_optimized_out + + def safe_tp_name(self): + try: + return self.type().field('tp_name').string() + except NullPyObjectPtr: + # NULL tp_name? + return 'unknown' + except RuntimeError: + # Can't even read the object at all? + return 'unknown' + + def proxyval(self, visited): + ''' + Scrape a value from the inferior process, and try to represent it + within the gdb process, whilst (hopefully) avoiding crashes when + the remote data is corrupt. + + Derived classes will override this. + + For example, a PyIntObject* with ob_ival 42 in the inferior process + should result in an int(42) in this process. + + visited: a set of all gdb.Value pyobject pointers already visited + whilst generating this value (to guard against infinite recursion when + visiting object graphs with loops). 
Analogous to Py_ReprEnter and + Py_ReprLeave + ''' + + class FakeRepr(object): + """ + Class representing a non-descript PyObject* value in the inferior + process for when we don't have a custom scraper, intended to have + a sane repr(). + """ + + def __init__(self, tp_name, address): + self.tp_name = tp_name + self.address = address + + def __repr__(self): + # For the NULL pointer, we have no way of knowing a type, so + # special-case it as per + # http://bugs.python.org/issue8032#msg100882 + if self.address == 0: + return '0x0' + return '<%s at remote 0x%x>' % (self.tp_name, self.address) + + return FakeRepr(self.safe_tp_name(), + long(self._gdbval)) + + def write_repr(self, out, visited): + ''' + Write a string representation of the value scraped from the inferior + process to "out", a file-like object. + ''' + # Default implementation: generate a proxy value and write its repr + # However, this could involve a lot of work for complicated objects, + # so for derived classes we specialize this + return out.write(repr(self.proxyval(visited))) + + @classmethod + def subclass_from_type(cls, t): + ''' + Given a PyTypeObjectPtr instance wrapping a gdb.Value that's a + (PyTypeObject*), determine the corresponding subclass of PyObjectPtr + to use + + Ideally, we would look up the symbols for the global types, but that + isn't working yet: + (gdb) python print gdb.lookup_symbol('PyList_Type')[0].value + Traceback (most recent call last): + File "", line 1, in + NotImplementedError: Symbol type not yet supported in Python scripts. + Error while executing Python code. + + For now, we use tp_flags, after doing some string comparisons on the + tp_name for some special-cases that don't seem to be visible through + flags + ''' + try: + tp_name = t.field('tp_name').string() + tp_flags = int(t.field('tp_flags')) + except RuntimeError: + # Handle any kind of error e.g. NULL ptrs by simply using the base + # class + return cls + + #print 'tp_flags = 0x%08x' % tp_flags + #print 'tp_name = %r' % tp_name + + name_map = {'bool': PyBoolObjectPtr, + 'classobj': PyClassObjectPtr, + 'instance': PyInstanceObjectPtr, + 'NoneType': PyNoneStructPtr, + 'frame': PyFrameObjectPtr, + 'set' : PySetObjectPtr, + 'frozenset' : PySetObjectPtr, + 'builtin_function_or_method' : PyCFunctionObjectPtr, + } + if tp_name in name_map: + return name_map[tp_name] + + if tp_flags & Py_TPFLAGS_HEAPTYPE: + return HeapTypeObjectPtr + + if tp_flags & Py_TPFLAGS_INT_SUBCLASS: + return PyIntObjectPtr + if tp_flags & Py_TPFLAGS_LONG_SUBCLASS: + return PyLongObjectPtr + if tp_flags & Py_TPFLAGS_LIST_SUBCLASS: + return PyListObjectPtr + if tp_flags & Py_TPFLAGS_TUPLE_SUBCLASS: + return PyTupleObjectPtr + if tp_flags & Py_TPFLAGS_BYTES_SUBCLASS: + return PyBytesObjectPtr + if tp_flags & Py_TPFLAGS_UNICODE_SUBCLASS: + return PyUnicodeObjectPtr + if tp_flags & Py_TPFLAGS_DICT_SUBCLASS: + return PyDictObjectPtr + if tp_flags & Py_TPFLAGS_BASE_EXC_SUBCLASS: + return PyBaseExceptionObjectPtr + #if tp_flags & Py_TPFLAGS_TYPE_SUBCLASS: + # return PyTypeObjectPtr + + # Use the base class: + return cls + + @classmethod + def from_pyobject_ptr(cls, gdbval): + ''' + Try to locate the appropriate derived class dynamically, and cast + the pointer accordingly. + ''' + try: + p = PyObjectPtr(gdbval) + cls = cls.subclass_from_type(p.type()) + return cls(gdbval, cast_to=cls.get_gdb_type()) + except RuntimeError: + # Handle any kind of error e.g. 
NULL ptrs by simply using the base + # class + pass + return cls(gdbval) + + @classmethod + def get_gdb_type(cls): + return gdb.lookup_type(cls._typename).pointer() + + def as_address(self): + return long(self._gdbval) + +class PyVarObjectPtr(PyObjectPtr): + _typename = 'PyVarObject' + +class ProxyAlreadyVisited(object): + ''' + Placeholder proxy to use when protecting against infinite recursion due to + loops in the object graph. + + Analogous to the values emitted by the users of Py_ReprEnter and Py_ReprLeave + ''' + def __init__(self, rep): + self._rep = rep + + def __repr__(self): + return self._rep + + +def _write_instance_repr(out, visited, name, pyop_attrdict, address): + '''Shared code for use by old-style and new-style classes: + write a representation to file-like object "out"''' + out.write('<') + out.write(name) + + # Write dictionary of instance attributes: + if isinstance(pyop_attrdict, PyDictObjectPtr): + out.write('(') + first = True + for pyop_arg, pyop_val in pyop_attrdict.iteritems(): + if not first: + out.write(', ') + first = False + out.write(pyop_arg.proxyval(visited)) + out.write('=') + pyop_val.write_repr(out, visited) + out.write(')') + out.write(' at remote 0x%x>' % address) + + +class InstanceProxy(object): + + def __init__(self, cl_name, attrdict, address): + self.cl_name = cl_name + self.attrdict = attrdict + self.address = address + + def __repr__(self): + if isinstance(self.attrdict, dict): + kwargs = ', '.join(["%s=%r" % (arg, val) + for arg, val in self.attrdict.iteritems()]) + return '<%s(%s) at remote 0x%x>' % (self.cl_name, + kwargs, self.address) + else: + return '<%s at remote 0x%x>' % (self.cl_name, + self.address) + +def _PyObject_VAR_SIZE(typeobj, nitems): + return ( ( typeobj.field('tp_basicsize') + + nitems * typeobj.field('tp_itemsize') + + (SIZEOF_VOID_P - 1) + ) & ~(SIZEOF_VOID_P - 1) + ).cast(_type_size_t) + +class HeapTypeObjectPtr(PyObjectPtr): + _typename = 'PyObject' + + def get_attr_dict(self): + ''' + Get the PyDictObject ptr representing the attribute dictionary + (or None if there's a problem) + ''' + try: + typeobj = self.type() + dictoffset = int_from_int(typeobj.field('tp_dictoffset')) + if dictoffset != 0: + if dictoffset < 0: + type_PyVarObject_ptr = gdb.lookup_type('PyVarObject').pointer() + tsize = int_from_int(self._gdbval.cast(type_PyVarObject_ptr)['ob_size']) + if tsize < 0: + tsize = -tsize + size = _PyObject_VAR_SIZE(typeobj, tsize) + dictoffset += size + assert dictoffset > 0 + assert dictoffset % SIZEOF_VOID_P == 0 + + dictptr = self._gdbval.cast(_type_char_ptr) + dictoffset + PyObjectPtrPtr = PyObjectPtr.get_gdb_type().pointer() + dictptr = dictptr.cast(PyObjectPtrPtr) + return PyObjectPtr.from_pyobject_ptr(dictptr.dereference()) + except RuntimeError: + # Corrupt data somewhere; fail safe + pass + + # Not found, or some kind of error: + return None + + def proxyval(self, visited): + ''' + Support for new-style classes. 
+ + Currently we just locate the dictionary using a transliteration to + python of _PyObject_GetDictPtr, ignoring descriptors + ''' + # Guard against infinite loops: + if self.as_address() in visited: + return ProxyAlreadyVisited('<...>') + visited.add(self.as_address()) + + pyop_attr_dict = self.get_attr_dict() + if pyop_attr_dict: + attr_dict = pyop_attr_dict.proxyval(visited) + else: + attr_dict = {} + tp_name = self.safe_tp_name() + + # New-style class: + return InstanceProxy(tp_name, attr_dict, long(self._gdbval)) + + def write_repr(self, out, visited): + # Guard against infinite loops: + if self.as_address() in visited: + out.write('<...>') + return + visited.add(self.as_address()) + + pyop_attrdict = self.get_attr_dict() + _write_instance_repr(out, visited, + self.safe_tp_name(), pyop_attrdict, self.as_address()) + +class ProxyException(Exception): + def __init__(self, tp_name, args): + self.tp_name = tp_name + self.args = args + + def __repr__(self): + return '%s%r' % (self.tp_name, self.args) + +class PyBaseExceptionObjectPtr(PyObjectPtr): + """ + Class wrapping a gdb.Value that's a PyBaseExceptionObject* i.e. an exception + within the process being debugged. + """ + _typename = 'PyBaseExceptionObject' + + def proxyval(self, visited): + # Guard against infinite loops: + if self.as_address() in visited: + return ProxyAlreadyVisited('(...)') + visited.add(self.as_address()) + arg_proxy = self.pyop_field('args').proxyval(visited) + return ProxyException(self.safe_tp_name(), + arg_proxy) + + def write_repr(self, out, visited): + # Guard against infinite loops: + if self.as_address() in visited: + out.write('(...)') + return + visited.add(self.as_address()) + + out.write(self.safe_tp_name()) + self.write_field_repr('args', out, visited) + +class PyClassObjectPtr(PyObjectPtr): + """ + Class wrapping a gdb.Value that's a PyClassObject* i.e. a + instance within the process being debugged. + """ + _typename = 'PyClassObject' + + +class BuiltInFunctionProxy(object): + def __init__(self, ml_name): + self.ml_name = ml_name + + def __repr__(self): + return "" % self.ml_name + +class BuiltInMethodProxy(object): + def __init__(self, ml_name, pyop_m_self): + self.ml_name = ml_name + self.pyop_m_self = pyop_m_self + + def __repr__(self): + return ('' + % (self.ml_name, + self.pyop_m_self.safe_tp_name(), + self.pyop_m_self.as_address()) + ) + +class PyCFunctionObjectPtr(PyObjectPtr): + """ + Class wrapping a gdb.Value that's a PyCFunctionObject* + (see Include/methodobject.h and Objects/methodobject.c) + """ + _typename = 'PyCFunctionObject' + + def proxyval(self, visited): + m_ml = self.field('m_ml') # m_ml is a (PyMethodDef*) + ml_name = m_ml['ml_name'].string() + + pyop_m_self = self.pyop_field('m_self') + if pyop_m_self.is_null(): + return BuiltInFunctionProxy(ml_name) + else: + return BuiltInMethodProxy(ml_name, pyop_m_self) + + +class PyCodeObjectPtr(PyObjectPtr): + """ + Class wrapping a gdb.Value that's a PyCodeObject* i.e. a instance + within the process being debugged. 
+ """ + _typename = 'PyCodeObject' + + def addr2line(self, addrq): + ''' + Get the line number for a given bytecode offset + + Analogous to PyCode_Addr2Line; translated from pseudocode in + Objects/lnotab_notes.txt + ''' + co_lnotab = self.pyop_field('co_lnotab').proxyval(set()) + + # Initialize lineno to co_firstlineno as per PyCode_Addr2Line + # not 0, as lnotab_notes.txt has it: + lineno = int_from_int(self.field('co_firstlineno')) + + addr = 0 + for addr_incr, line_incr in zip(co_lnotab[::2], co_lnotab[1::2]): + addr += ord(addr_incr) + if addr > addrq: + return lineno + lineno += ord(line_incr) + return lineno + + +class PyDictObjectPtr(PyObjectPtr): + """ + Class wrapping a gdb.Value that's a PyDictObject* i.e. a dict instance + within the process being debugged. + """ + _typename = 'PyDictObject' + + def iteritems(self): + ''' + Yields a sequence of (PyObjectPtr key, PyObjectPtr value) pairs, + analagous to dict.iteritems() + ''' + for i in safe_range(self.field('ma_mask') + 1): + ep = self.field('ma_table') + i + pyop_value = PyObjectPtr.from_pyobject_ptr(ep['me_value']) + if not pyop_value.is_null(): + pyop_key = PyObjectPtr.from_pyobject_ptr(ep['me_key']) + yield (pyop_key, pyop_value) + + def proxyval(self, visited): + # Guard against infinite loops: + if self.as_address() in visited: + return ProxyAlreadyVisited('{...}') + visited.add(self.as_address()) + + result = {} + for pyop_key, pyop_value in self.iteritems(): + proxy_key = pyop_key.proxyval(visited) + proxy_value = pyop_value.proxyval(visited) + result[proxy_key] = proxy_value + return result + + def write_repr(self, out, visited): + # Guard against infinite loops: + if self.as_address() in visited: + out.write('{...}') + return + visited.add(self.as_address()) + + out.write('{') + first = True + for pyop_key, pyop_value in self.iteritems(): + if not first: + out.write(', ') + first = False + pyop_key.write_repr(out, visited) + out.write(': ') + pyop_value.write_repr(out, visited) + out.write('}') + +class PyInstanceObjectPtr(PyObjectPtr): + _typename = 'PyInstanceObject' + + def proxyval(self, visited): + # Guard against infinite loops: + if self.as_address() in visited: + return ProxyAlreadyVisited('<...>') + visited.add(self.as_address()) + + # Get name of class: + in_class = self.pyop_field('in_class') + cl_name = in_class.pyop_field('cl_name').proxyval(visited) + + # Get dictionary of instance attributes: + in_dict = self.pyop_field('in_dict').proxyval(visited) + + # Old-style class: + return InstanceProxy(cl_name, in_dict, long(self._gdbval)) + + def write_repr(self, out, visited): + # Guard against infinite loops: + if self.as_address() in visited: + out.write('<...>') + return + visited.add(self.as_address()) + + # Old-style class: + + # Get name of class: + in_class = self.pyop_field('in_class') + cl_name = in_class.pyop_field('cl_name').proxyval(visited) + + # Get dictionary of instance attributes: + pyop_in_dict = self.pyop_field('in_dict') + + _write_instance_repr(out, visited, + cl_name, pyop_in_dict, self.as_address()) + +class PyListObjectPtr(PyObjectPtr): + _typename = 'PyListObject' + + def __getitem__(self, i): + # Get the gdb.Value for the (PyObject*) with the given index: + field_ob_item = self.field('ob_item') + return field_ob_item[i] + + def proxyval(self, visited): + # Guard against infinite loops: + if self.as_address() in visited: + return ProxyAlreadyVisited('[...]') + visited.add(self.as_address()) + + result = [PyObjectPtr.from_pyobject_ptr(self[i]).proxyval(visited) + for i in 
safe_range(int_from_int(self.field('ob_size')))] + return result + + def write_repr(self, out, visited): + # Guard against infinite loops: + if self.as_address() in visited: + out.write('[...]') + return + visited.add(self.as_address()) + + out.write('[') + for i in safe_range(int_from_int(self.field('ob_size'))): + if i > 0: + out.write(', ') + element = PyObjectPtr.from_pyobject_ptr(self[i]) + element.write_repr(out, visited) + out.write(']') + +class PyLongObjectPtr(PyObjectPtr): + _typename = 'PyLongObject' + + def proxyval(self, visited): + ''' + Python's Include/longobjrep.h has this declaration: + struct _longobject { + PyObject_VAR_HEAD + digit ob_digit[1]; + }; + + with this description: + The absolute value of a number is equal to + SUM(for i=0 through abs(ob_size)-1) ob_digit[i] * 2**(SHIFT*i) + Negative numbers are represented with ob_size < 0; + zero is represented by ob_size == 0. + + where SHIFT can be either: + #define PyLong_SHIFT 30 + #define PyLong_SHIFT 15 + ''' + ob_size = long(self.field('ob_size')) + if ob_size == 0: + return 0L + + ob_digit = self.field('ob_digit') + + if gdb.lookup_type('digit').sizeof == 2: + SHIFT = 15L + else: + SHIFT = 30L + + digits = [long(ob_digit[i]) * 2**(SHIFT*i) + for i in safe_range(abs(ob_size))] + result = sum(digits) + if ob_size < 0: + result = -result + return result + +class PyBoolObjectPtr(PyLongObjectPtr): + """ + Class wrapping a gdb.Value that's a PyBoolObject* i.e. one of the two + instances (Py_True/Py_False) within the process being debugged. + """ + def proxyval(self, visited): + if PyLongObjectPtr.proxyval(self, visited): + return True + else: + return False + +class PyNoneStructPtr(PyObjectPtr): + """ + Class wrapping a gdb.Value that's a PyObject* pointing to the + singleton (we hope) _Py_NoneStruct with ob_type PyNone_Type + """ + _typename = 'PyObject' + + def proxyval(self, visited): + return None + + +class PyFrameObjectPtr(PyObjectPtr): + _typename = 'PyFrameObject' + + def __init__(self, gdbval, cast_to): + PyObjectPtr.__init__(self, gdbval, cast_to) + + if not self.is_optimized_out(): + self.co = PyCodeObjectPtr.from_pyobject_ptr(self.field('f_code')) + self.co_name = self.co.pyop_field('co_name') + self.co_filename = self.co.pyop_field('co_filename') + + self.f_lineno = int_from_int(self.field('f_lineno')) + self.f_lasti = int_from_int(self.field('f_lasti')) + self.co_nlocals = int_from_int(self.co.field('co_nlocals')) + self.co_varnames = PyTupleObjectPtr.from_pyobject_ptr(self.co.field('co_varnames')) + + def iter_locals(self): + ''' + Yield a sequence of (name,value) pairs of PyObjectPtr instances, for + the local variables of this frame + ''' + if self.is_optimized_out(): + return + + f_localsplus = self.field('f_localsplus') + for i in safe_range(self.co_nlocals): + pyop_value = PyObjectPtr.from_pyobject_ptr(f_localsplus[i]) + if not pyop_value.is_null(): + pyop_name = PyObjectPtr.from_pyobject_ptr(self.co_varnames[i]) + yield (pyop_name, pyop_value) + + def iter_globals(self): + ''' + Yield a sequence of (name,value) pairs of PyObjectPtr instances, for + the global variables of this frame + ''' + if self.is_optimized_out(): + return + + pyop_globals = self.pyop_field('f_globals') + return pyop_globals.iteritems() + + def iter_builtins(self): + ''' + Yield a sequence of (name,value) pairs of PyObjectPtr instances, for + the builtin variables + ''' + if self.is_optimized_out(): + return + + pyop_builtins = self.pyop_field('f_builtins') + return pyop_builtins.iteritems() + + def get_var_by_name(self, name): + 
''' + Look for the named local variable, returning a (PyObjectPtr, scope) pair + where scope is a string 'local', 'global', 'builtin' + + If not found, return (None, None) + ''' + for pyop_name, pyop_value in self.iter_locals(): + if name == pyop_name.proxyval(set()): + return pyop_value, 'local' + for pyop_name, pyop_value in self.iter_globals(): + if name == pyop_name.proxyval(set()): + return pyop_value, 'global' + for pyop_name, pyop_value in self.iter_builtins(): + if name == pyop_name.proxyval(set()): + return pyop_value, 'builtin' + return None, None + + def filename(self): + '''Get the path of the current Python source file, as a string''' + if self.is_optimized_out(): + return '(frame information optimized out)' + return self.co_filename.proxyval(set()) + + def current_line_num(self): + '''Get current line number as an integer (1-based) + + Translated from PyFrame_GetLineNumber and PyCode_Addr2Line + + See Objects/lnotab_notes.txt + ''' + if self.is_optimized_out(): + return None + f_trace = self.field('f_trace') + if long(f_trace) != 0: + # we have a non-NULL f_trace: + return self.f_lineno + else: + #try: + return self.co.addr2line(self.f_lasti) + #except ValueError: + # return self.f_lineno + + def current_line(self): + '''Get the text of the current source line as a string, with a trailing + newline character''' + if self.is_optimized_out(): + return '(frame information optimized out)' + with open(self.filename(), 'r') as f: + all_lines = f.readlines() + # Convert from 1-based current_line_num to 0-based list offset: + return all_lines[self.current_line_num()-1] + + def write_repr(self, out, visited): + if self.is_optimized_out(): + out.write('(frame information optimized out)') + return + out.write('Frame 0x%x, for file %s, line %i, in %s (' + % (self.as_address(), + self.co_filename.proxyval(visited), + self.current_line_num(), + self.co_name.proxyval(visited))) + first = True + for pyop_name, pyop_value in self.iter_locals(): + if not first: + out.write(', ') + first = False + + out.write(pyop_name.proxyval(visited)) + out.write('=') + pyop_value.write_repr(out, visited) + + out.write(')') + +class PySetObjectPtr(PyObjectPtr): + _typename = 'PySetObject' + + def proxyval(self, visited): + # Guard against infinite loops: + if self.as_address() in visited: + return ProxyAlreadyVisited('%s(...)' % self.safe_tp_name()) + visited.add(self.as_address()) + + members = [] + table = self.field('table') + for i in safe_range(self.field('mask')+1): + setentry = table[i] + key = setentry['key'] + if key != 0: + key_proxy = PyObjectPtr.from_pyobject_ptr(key).proxyval(visited) + if key_proxy != '': + members.append(key_proxy) + if self.safe_tp_name() == 'frozenset': + return frozenset(members) + else: + return set(members) + + def write_repr(self, out, visited): + out.write(self.safe_tp_name()) + + # Guard against infinite loops: + if self.as_address() in visited: + out.write('(...)') + return + visited.add(self.as_address()) + + out.write('([') + first = True + table = self.field('table') + for i in safe_range(self.field('mask')+1): + setentry = table[i] + key = setentry['key'] + if key != 0: + pyop_key = PyObjectPtr.from_pyobject_ptr(key) + key_proxy = pyop_key.proxyval(visited) # FIXME! 
+ if key_proxy != '': + if not first: + out.write(', ') + first = False + pyop_key.write_repr(out, visited) + out.write('])') + + +class PyBytesObjectPtr(PyObjectPtr): + _typename = 'PyBytesObject' + + def __str__(self): + field_ob_size = self.field('ob_size') + field_ob_sval = self.field('ob_sval') + char_ptr = field_ob_sval.address.cast(_type_unsigned_char_ptr) + return ''.join([chr(char_ptr[i]) for i in safe_range(field_ob_size)]) + + def proxyval(self, visited): + return str(self) + +class PyTupleObjectPtr(PyObjectPtr): + _typename = 'PyTupleObject' + + def __getitem__(self, i): + # Get the gdb.Value for the (PyObject*) with the given index: + field_ob_item = self.field('ob_item') + return field_ob_item[i] + + def proxyval(self, visited): + # Guard against infinite loops: + if self.as_address() in visited: + return ProxyAlreadyVisited('(...)') + visited.add(self.as_address()) + + result = tuple([PyObjectPtr.from_pyobject_ptr(self[i]).proxyval(visited) + for i in safe_range(int_from_int(self.field('ob_size')))]) + return result + + def write_repr(self, out, visited): + # Guard against infinite loops: + if self.as_address() in visited: + out.write('(...)') + return + visited.add(self.as_address()) + + out.write('(') + for i in safe_range(int_from_int(self.field('ob_size'))): + if i > 0: + out.write(', ') + element = PyObjectPtr.from_pyobject_ptr(self[i]) + element.write_repr(out, visited) + if self.field('ob_size') == 1: + out.write(',)') + else: + out.write(')') + +class PyTypeObjectPtr(PyObjectPtr): + _typename = 'PyTypeObject' + + +class PyUnicodeObjectPtr(PyObjectPtr): + _typename = 'PyUnicodeObject' + + def proxyval(self, visited): + # From unicodeobject.h: + # Py_ssize_t length; /* Length of raw Unicode data in buffer */ + # Py_UNICODE *str; /* Raw Unicode buffer */ + field_length = long(self.field('length')) + field_str = self.field('str') + + # Gather a list of ints from the Py_UNICODE array; these are either + # UCS-2 or UCS-4 code points: + Py_UNICODEs = [int(field_str[i]) for i in safe_range(field_length)] + + # Convert the int code points to unicode characters, and generate a + # local unicode instance: + result = u''.join([unichr(ucs) for ucs in Py_UNICODEs]) + return result + + +def int_from_int(gdbval): + return int(str(gdbval)) + + +def stringify(val): + # TODO: repr() puts everything on one line; pformat can be nicer, but + # can lead to v.long results; this function isolates the choice + if True: + return repr(val) + else: + from pprint import pformat + return pformat(val) + + +class PyObjectPtrPrinter: + "Prints a (PyObject*)" + + def __init__ (self, gdbval): + self.gdbval = gdbval + + def to_string (self): + pyop = PyObjectPtr.from_pyobject_ptr(self.gdbval) + if True: + return pyop.get_truncated_repr(MAX_OUTPUT_LEN) + else: + # Generate full proxy value then stringify it. 
+ # Doing so could be expensive + proxyval = pyop.proxyval(set()) + return stringify(proxyval) + +def pretty_printer_lookup(gdbval): + type = gdbval.type.unqualified() + if type.code == gdb.TYPE_CODE_PTR: + type = type.target().unqualified() + t = str(type) + if t in ("PyObject", "PyFrameObject", "PyUnicodeObject"): + return PyObjectPtrPrinter(gdbval) + +""" +During development, I've been manually invoking the code in this way: +(gdb) python + +import sys +sys.path.append('/home/david/coding/python-gdb') +import libpython +end + +then reloading it after each edit like this: +(gdb) python reload(libpython) + +The following code should ensure that the prettyprinter is registered +if the code is autoloaded by gdb when visiting libpython.so, provided +that this python file is installed to the same path as the library (or its +.debug file) plus a "-gdb.py" suffix, e.g: + /usr/lib/libpython2.6.so.1.0-gdb.py + /usr/lib/debug/usr/lib/libpython2.6.so.1.0.debug-gdb.py +""" +def register (obj): + if obj == None: + obj = gdb + + # Wire up the pretty-printer + obj.pretty_printers.append(pretty_printer_lookup) + +register (gdb.current_objfile ()) + + +class Frame(object): + ''' + Wrapper for gdb.Frame, adding various methods + ''' + def __init__(self, gdbframe): + self._gdbframe = gdbframe + + def older(self): + older = self._gdbframe.older() + if older: + return Frame(older) + else: + return None + + def newer(self): + newer = self._gdbframe.newer() + if newer: + return Frame(newer) + else: + return None + + def select(self): + self._gdbframe.select() + + def get_index(self): + '''Calculate index of frame, starting at 0 for the newest frame within + this thread''' + index = 0 + # Go down until you reach the newest frame: + iter_frame = self + while iter_frame.newer(): + index += 1 + iter_frame = iter_frame.newer() + return index + + def is_evalframeex(self): + if self._gdbframe.function(): + if self._gdbframe.function().name == 'PyEval_EvalFrameEx': + ''' + I believe we also need to filter on the inline + struct frame_id.inline_depth, only regarding frames with + an inline depth of 0 as actually being this function + + So we reject those with type gdb.INLINE_FRAME + ''' + if self._gdbframe.type() == gdb.NORMAL_FRAME: + # We have a PyEval_EvalFrameEx frame: + return True + + return False + + def get_pyop(self): + try: + f = self._gdbframe.read_var('f') + return PyFrameObjectPtr.from_pyobject_ptr(f) + except ValueError: + return None + + @classmethod + def get_selected_frame(cls): + _gdbframe = gdb.selected_frame() + if _gdbframe: + return Frame(_gdbframe) + return None + + @classmethod + def get_selected_python_frame(cls): + '''Try to obtain the Frame for the python code in the selected frame, + or None''' + frame = cls.get_selected_frame() + + while frame: + if frame.is_evalframeex(): + return frame + frame = frame.older() + + # Not found: + return None + + def print_summary(self): + if self.is_evalframeex(): + pyop = self.get_pyop() + if pyop: + sys.stdout.write('#%i %s\n' % (self.get_index(), pyop.get_truncated_repr(MAX_OUTPUT_LEN))) + sys.stdout.write(pyop.current_line()) + else: + sys.stdout.write('#%i (unable to read python frame information)\n' % self.get_index()) + else: + sys.stdout.write('#%i\n' % self.get_index()) + +class PyList(gdb.Command): + '''List the current Python source code, if any + + Use + py-list START + to list at a different line number within the python source. + + Use + py-list START, END + to list a specific range of lines within the python source. 
+ ''' + + def __init__(self): + gdb.Command.__init__ (self, + "py-list", + gdb.COMMAND_FILES, + gdb.COMPLETE_NONE) + + + def invoke(self, args, from_tty): + import re + + start = None + end = None + + m = re.match(r'\s*(\d+)\s*', args) + if m: + start = int(m.group(0)) + end = start + 10 + + m = re.match(r'\s*(\d+)\s*,\s*(\d+)\s*', args) + if m: + start, end = map(int, m.groups()) + + frame = Frame.get_selected_python_frame() + if not frame: + print 'Unable to locate python frame' + return + + pyop = frame.get_pyop() + if not pyop: + print 'Unable to read information on python frame' + return + + filename = pyop.filename() + lineno = pyop.current_line_num() + + if start is None: + start = lineno - 5 + end = lineno + 5 + + if start<1: + start = 1 + + with open(filename, 'r') as f: + all_lines = f.readlines() + # start and end are 1-based, all_lines is 0-based; + # so [start-1:end] as a python slice gives us [start, end] as a + # closed interval + for i, line in enumerate(all_lines[start-1:end]): + linestr = str(i+start) + # Highlight current line: + if i + start == lineno: + linestr = '>' + linestr + sys.stdout.write('%4s %s' % (linestr, line)) + + +# ...and register the command: +PyList() + +def move_in_stack(move_up): + '''Move up or down the stack (for the py-up/py-down command)''' + frame = Frame.get_selected_python_frame() + while frame: + if move_up: + iter_frame = frame.older() + else: + iter_frame = frame.newer() + + if not iter_frame: + break + + if iter_frame.is_evalframeex(): + # Result: + iter_frame.select() + iter_frame.print_summary() + return + + frame = iter_frame + + if move_up: + print 'Unable to find an older python frame' + else: + print 'Unable to find a newer python frame' + +class PyUp(gdb.Command): + 'Select and print the python stack frame that called this one (if any)' + def __init__(self): + gdb.Command.__init__ (self, + "py-up", + gdb.COMMAND_STACK, + gdb.COMPLETE_NONE) + + + def invoke(self, args, from_tty): + move_in_stack(move_up=True) + +PyUp() + +class PyDown(gdb.Command): + 'Select and print the python stack frame called by this one (if any)' + def __init__(self): + gdb.Command.__init__ (self, + "py-down", + gdb.COMMAND_STACK, + gdb.COMPLETE_NONE) + + + def invoke(self, args, from_tty): + move_in_stack(move_up=False) + +PyDown() + +class PyBacktrace(gdb.Command): + 'Display the current python frame and all the frames within its call stack (if any)' + def __init__(self): + gdb.Command.__init__ (self, + "py-bt", + gdb.COMMAND_STACK, + gdb.COMPLETE_NONE) + + + def invoke(self, args, from_tty): + frame = Frame.get_selected_python_frame() + while frame: + if frame.is_evalframeex(): + frame.print_summary() + frame = frame.older() + +PyBacktrace() + +class PyPrint(gdb.Command): + 'Look up the given python variable name, and print it' + def __init__(self): + gdb.Command.__init__ (self, + "py-print", + gdb.COMMAND_DATA, + gdb.COMPLETE_NONE) + + + def invoke(self, args, from_tty): + name = str(args) + + frame = Frame.get_selected_python_frame() + if not frame: + print 'Unable to locate python frame' + return + + pyop_frame = frame.get_pyop() + if not pyop_frame: + print 'Unable to read information on python frame' + return + + pyop_var, scope = pyop_frame.get_var_by_name(name) + + if pyop_var: + print ('%s %r = %s' + % (scope, + name, + pyop_var.get_truncated_repr(MAX_OUTPUT_LEN))) + else: + print '%r not found' % name + +PyPrint() + +class PyLocals(gdb.Command): + 'Look up the given python variable name, and print it' + def __init__(self): + gdb.Command.__init__ 
(self, + "py-locals", + gdb.COMMAND_DATA, + gdb.COMPLETE_NONE) + + + def invoke(self, args, from_tty): + name = str(args) + + frame = Frame.get_selected_python_frame() + if not frame: + print 'Unable to locate python frame' + return + + pyop_frame = frame.get_pyop() + if not pyop_frame: + print 'Unable to read information on python frame' + return + + for pyop_name, pyop_value in pyop_frame.iter_locals(): + print ('%s = %s' + % (pyop_name.proxyval(set()), + pyop_value.get_truncated_repr(MAX_OUTPUT_LEN))) + +PyLocals() diff --git a/python3.spec b/python3.spec new file mode 100644 index 0000000..be00616 --- /dev/null +++ b/python3.spec @@ -0,0 +1,2705 @@ +# ================== +# Top-level metadata +# ================== + +%global pybasever 3.7 + +# pybasever without the dot: +%global pyshortver 37 + +Name: python%{pyshortver} +Summary: Version %{pybasever} of the Python interpreter +URL: https://www.python.org/ + + +# Second alpha +%global prerel a2 + +# WARNING When rebasing to a new Python version, +# remember to update the python3-docs package as well +Version: %{pybasever}.0 +Release: 0.1.%{?prerel}%{?dist} +License: Python + + +# ================================== +# Conditionals controlling the build +# ================================== + +# Note that the bcond macros are named for the CLI option they create. +# "%%bcond_without" means "ENABLE by default and create a --without option" + + +# Flat package, i.e. python36, python37, python38 for tox etc. +# warning: changes some other defaults +# in Fedora, never turn this on for the python3 package +# and always keep it on for python37 etc. +# WARNING: This does not change the package name and summary above +%bcond_without flatpackage + + +# Expensive optimizations (mainly, profile-guided optimizations) +%ifarch %{ix86} x86_64 +%bcond_without optimizations +%else +# On some architectures, the optimized build takes tens of hours, possibly +# longer than Koji's 24-hour timeout. Disable optimizations here. +%bcond_with optimizations +%endif + +# Run the test suite in %%check +%bcond_without tests + +# Ability to reuse RPM-installed pip using rewheel +%if %{with flatpackage} +%bcond_with rewheel +%else +%bcond_without rewheel +%endif + +# Extra build for debugging the interpreter or C-API extensions +# (the -debug subpackages) +%if %{with flatpackage} +%bcond_with debug_build +%else +%bcond_without debug_build +%endif + +# Support for the GDB debugger +%bcond_without gdb_hooks + +# Support for systemtap instrumentation +%bcond_with systemtap + +# The dbm.gnu module (key-value database) +%bcond_without gdbm + +# Main interpreter loop optimization +%bcond_without computed_gotos + +# Support for the Valgrind debugger/profiler +%ifnarch s390 %{mips} riscv64 +%bcond_without valgrind +%else +# Some arches don't have valgrind, disable support for it there. +%bcond_with valgrind +%endif + + +# ================================== +# Notes from bootstraping Python 3.6 +# ================================== +# +# New Python major version (3.X) break ABI and bytecode compatibility, +# so all packages depending on it need to be rebuilt. +# +# Due to a dependency cycle between Python, gdb, rpm, pip, setuptools, wheel, +# and other packages, this isn't straightforward. +# Build in the following order: +# +# 1. At the same time: +# - gdb without python support (add %%global _without_python 1 on top of +# gdb's SPEC file) +# - python-rpm-generators with bootstrapping_python set to 1 +# (this can be done also during step 2., but should be done before 3.) +# 2. 
python3 without rewheel (use %%bcond_with rewheel instead of +# %%bcond_without) +# 3. At the same time: +# - gdb with python support (remove %%global _without_python 1 on top of +# gdb's SPEC file) +# - python-rpm-generators with bootstrapping_python set to 0 +# (this can be done at any later step without negative effects) +# 4. rpm +# 5. python-setuptools with bootstrap set to 1 +# 6. python-pip with build_wheel set to 0 +# 7. python-wheel with %%bcond_without bootstrap +# 8. python-setuptools with bootstrap set to 0 and also with_check set to 0 +# 9. python-pip with build_wheel set to 1 +# 10. pyparsing +# 11. python3 with rewheel +# +# Then the most important packages have to be built, in dependency order. +# These were: +# python-sphinx, pytest, python-requests, cloud-init, dnf, anaconda, abrt +# +# After these have been built, a targeted rebuild should be done for the rest. + + +# ===================== +# General global macros +# ===================== + +%global pylibdir %{_libdir}/python%{pybasever} +%global dynload_dir %{pylibdir}/lib-dynload + +# ABIFLAGS, LDVERSION and SOABI are in the upstream configure.ac +# See PEP 3149 for some background: http://www.python.org/dev/peps/pep-3149/ +%global ABIFLAGS_optimized m +%global ABIFLAGS_debug dm + +%global LDVERSION_optimized %{pybasever}%{ABIFLAGS_optimized} +%global LDVERSION_debug %{pybasever}%{ABIFLAGS_debug} + +%global SOABI_optimized cpython-%{pyshortver}%{ABIFLAGS_optimized}-%{_arch}-linux%{_gnu} +%global SOABI_debug cpython-%{pyshortver}%{ABIFLAGS_debug}-%{_arch}-linux%{_gnu} + +# All bytecode files are in a __pycache__ subdirectory, with a name +# reflecting the version of the bytecode. +# See PEP 3147: http://www.python.org/dev/peps/pep-3147/ +# For example, +# foo/bar.py +# has bytecode at: +# foo/__pycache__/bar.cpython-%%{pyshortver}.pyc +# foo/__pycache__/bar.cpython-%%{pyshortver}.opt-1.pyc +# foo/__pycache__/bar.cpython-%%{pyshortver}.opt-2.pyc +%global bytecode_suffixes .cpython-%{pyshortver}*.pyc + +# Python's configure script defines SOVERSION, and this is used in the Makefile +# to determine INSTSONAME, the name of the libpython DSO: +# LDLIBRARY='libpython$(VERSION).so' +# INSTSONAME="$LDLIBRARY".$SOVERSION +# We mirror this here in order to make it easier to add the -gdb.py hooks. +# (if these get out of sync, the payload of the libs subpackage will fail +# and halt the build) +%global py_SOVERSION 1.0 +%global py_INSTSONAME_optimized libpython%{LDVERSION_optimized}.so.%{py_SOVERSION} +%global py_INSTSONAME_debug libpython%{LDVERSION_debug}.so.%{py_SOVERSION} + +# Disable automatic bytecompilation. The python3 binary is not yet be +# available in /usr/bin when Python is built. Also, the bytecompilation fails +# on files that test invalid syntax. +%undefine py_auto_byte_compile + +# For multilib support, files that are different between 32- and 64-bit arches +# need different filenames. Use "64" or "32" according to the word size. +# Currently, the best way to determine an architecture's word size happens to +# be checking %%{_lib}. 
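+# (Illustration only, not used by the build: "rpm --eval '%%{_lib}'" prints
+# "lib64" on 64-bit multilib arches such as x86_64 and "lib" on 32-bit ones,
+# which is what the conditional below keys on.)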
+%if "%{_lib}" == "lib64" +%global wordsize 64 +%else +%global wordsize 32 +%endif + + +# ======================= +# Build-time requirements +# ======================= + +# (keep this list alphabetized) + +BuildRequires: autoconf +BuildRequires: bluez-libs-devel +BuildRequires: bzip2 +BuildRequires: bzip2-devel +BuildRequires: desktop-file-utils +BuildRequires: expat-devel + +BuildRequires: findutils +BuildRequires: gcc-c++ +%if %{with gdbm} +BuildRequires: gdbm-devel +%endif +BuildRequires: glibc-devel +BuildRequires: gmp-devel +BuildRequires: libappstream-glib +BuildRequires: libffi-devel +BuildRequires: libGL-devel +BuildRequires: libuuid-devel +BuildRequires: libX11-devel +BuildRequires: ncurses-devel + +BuildRequires: openssl-devel +BuildRequires: pkgconfig +BuildRequires: readline-devel +BuildRequires: sqlite-devel +BuildRequires: gdb + +%if %{with systemtap} +BuildRequires: systemtap-devel +BuildRequires: systemtap-sdt-devel +%global tapsetdir /usr/share/systemtap/tapset +%endif + +BuildRequires: tar +BuildRequires: tcl-devel +BuildRequires: tix-devel +BuildRequires: tk-devel + +%if %{with valgrind} +BuildRequires: valgrind-devel +%endif + +BuildRequires: xz-devel +BuildRequires: zlib-devel + +BuildRequires: /usr/bin/dtrace + +# workaround http://bugs.python.org/issue19804 (test_uuid requires ifconfig) +BuildRequires: /usr/sbin/ifconfig + +%if %{with rewheel} +BuildRequires: python3-setuptools +BuildRequires: python3-pip +%endif + + +# ======================= +# Source code and patches +# ======================= + +Source: https://www.python.org/ftp/python/%{version}/Python-%{version}%{prerel}.tar.xz + +# Supply an RPM macro "py_byte_compile" for the python3-devel subpackage +# to enable specfiles to selectively byte-compile individual files and paths +# with different Python runtimes as necessary: +Source3: macros.pybytecompile%{pybasever} + +# Systemtap tapset to make it easier to use the systemtap static probes +# (actually a template; LIBRARY_PATH will get fixed up during install) +# Written by dmalcolm; not yet sent upstream +Source5: libpython.stp + +# Example systemtap script using the tapset +# Written by wcohen, mjw, dmalcolm; not yet sent upstream +Source6: systemtap-example.stp + +# Another example systemtap script that uses the tapset +# Written by dmalcolm; not yet sent upstream +Source7: pyfuntop.stp + +# A simple script to check timestamps of bytecode files +# Run in check section with Python that is currently being built +# Written by bkabrda +Source8: check-pyc-and-pyo-timestamps.py + +# Backward compatible no-op macro for system-python +# Remove in Fedora 29 +Source9: macros.systempython + +# Desktop menu entry for idle3 +Source10: idle3.desktop + +# AppData file for idle3 +Source11: idle3.appdata.xml + +# 00001 # +# Fixup distutils/unixccompiler.py to remove standard library path from rpath: +# Was Patch0 in ivazquez' python3000 specfile: +Patch1: 00001-rpath.patch + +# 00055 # +# Systemtap support: add statically-defined probe points +# Patch sent upstream as http://bugs.python.org/issue14776 +# with some subsequent reworking to cope with LANG=C in an rpmbuild +# (where sys.getfilesystemencoding() == 'ascii') +Patch55: 00055-systemtap.patch + +# 00102 # +# Change the various install paths to use /usr/lib64/ instead or /usr/lib +# Only used when "%{_lib}" == "lib64" +# Not yet sent upstream. 
+Patch102: 00102-lib64.patch + +# 00111 # +# Patch the Makefile.pre.in so that the generated Makefile doesn't try to build +# a libpythonMAJOR.MINOR.a +# See https://bugzilla.redhat.com/show_bug.cgi?id=556092 +# Downstream only: not appropriate for upstream +Patch111: 00111-no-static-lib.patch + +# 00132 # +# Add non-standard hooks to unittest for use in the "check" phase below, when +# running selftests within the build: +# @unittest._skipInRpmBuild(reason) +# for tests that hang or fail intermittently within the build environment, and: +# @unittest._expectedFailureInRpmBuild +# for tests that always fail within the build environment +# +# The hooks only take effect if WITHIN_PYTHON_RPM_BUILD is set in the +# environment, which we set manually in the appropriate portion of the "check" +# phase below (and which potentially other python-* rpms could set, to reuse +# these unittest hooks in their own "check" phases) +Patch132: 00132-add-rpmbuild-hooks-to-unittest.patch + +# 00155 # +# Avoid allocating thunks in ctypes unless absolutely necessary, to avoid +# generating SELinux denials on "import ctypes" and "import uuid" when +# embedding Python within httpd +# See https://bugzilla.redhat.com/show_bug.cgi?id=814391 +Patch155: 00155-avoid-ctypes-thunks.patch + +# 00160 # +# Python 3.3 added os.SEEK_DATA and os.SEEK_HOLE, which may be present in the +# header files in the build chroot, but may not be supported in the running +# kernel, hence we disable this test in an rpm build. +# Adding these was upstream issue http://bugs.python.org/issue10142 +# Not yet sent upstream +Patch160: 00160-disable-test_fs_holes-in-rpm-build.patch + +# 00163 # +# Some tests within test_socket fail intermittently when run inside Koji; +# disable them using unittest._skipInRpmBuild +# Not yet sent upstream +Patch163: 00163-disable-parts-of-test_socket-in-rpm-build.patch + +# 00170 # +# In debug builds, try to print repr() when a C-level assert fails in the +# garbage collector (typically indicating a reference-counting error +# somewhere else e.g in an extension module) +# The new macros/functions within gcmodule.c are hidden to avoid exposing +# them within the extension API. +# Sent upstream: http://bugs.python.org/issue9263 +# See https://bugzilla.redhat.com/show_bug.cgi?id=614680 +Patch170: 00170-gc-assertions.patch + +# 00178 # +# Don't duplicate various FLAGS in sysconfig values +# http://bugs.python.org/issue17679 +# Does not affect python2 AFAICS (different sysconfig values initialization) +Patch178: 00178-dont-duplicate-flags-in-sysconfig.patch + +# 00189 # +# Add the rewheel module, allowing to recreate wheels from already installed +# ones +# https://github.com/bkabrda/rewheel +Patch189: 00189-add-rewheel-module.patch + +# 00205 # +# LIBPL variable in makefile takes LIBPL from configure.ac +# but the LIBPL variable defined there doesn't respect libdir macro +Patch205: 00205-make-libpl-respect-lib64.patch + +# 00251 +# Set values of prefix and exec_prefix in distutils install command +# to /usr/local if executable is /usr/bin/python* and RPM build +# is not detected to make pip and distutils install into separate location +# Fedora Change: https://fedoraproject.org/wiki/Changes/Making_sudo_pip_safe +Patch251: 00251-change-user-install-location.patch + +# 00264 # +# test_pass_by_value was added in Python 3.6.1 and on aarch64 +# it is catching an error that was there, but wasn't tested before. +# Therefore skipping the test on aarch64 until fixed upstream. 
+# Reported upstream: http://bugs.python.org/issue29804 +Patch264: 00264-skip-test-failing-on-aarch64.patch + +# 00273 # +# Skip test_float_with_comma, which fails in Koji with UnicodeDecodeError +# See https://bugzilla.redhat.com/show_bug.cgi?id=1484497 +Patch273: 00273-skip-float-test.patch + +# 00274 # +# Upstream uses Debian-style architecture naming. Change to match Fedora. +Patch274: 00274-fix-arch-names.patch + + +# (New patches go here ^^^) +# +# When adding new patches to "python" and "python3" in Fedora, EL, etc., +# please try to keep the patch numbers in-sync between all specfiles. +# +# More information, and a patch number catalog, is at: +# +# https://fedoraproject.org/wiki/SIGs/Python/PythonPatches + + +# ========================================== +# Descriptions, and metadata for subpackages +# ========================================== + +%if %{without flatpackage} + +# Packages with Python modules in standard locations automatically +# depend on python(abi). Provide that here. +Provides: python(abi) = %{pybasever} + +# For backward compatibility only, remove in F29: +Provides: system-python(abi) = %{pybasever} +Provides: system-python = %{version}-%{release} +Provides: system-python%{?_isa} = %{version}-%{release} +Obsoletes: system-python < %{version}-%{release} + +Requires: %{name}-libs%{?_isa} = %{version}-%{release} + +# In order to support multiple Python interpreters for development purposes, +# packages with with the naming scheme flatpackage (e.g. python35) exist for +# non-default versions of Python 3. +# For consistency, and to keep the upgrade path clean, we Provide/Obsolete +# these names here. +Provides: python%{pyshortver} = %{version}-%{release} +# Note that using Obsoletes without package version is not standard practice. +# Here we assert that *any* version of the system's default interpreter is +# preferable to an "extra" interpreter. For example, python3-3.6.1 will +# replace python36-3.6.2. +Obsoletes: python%{pyshortver} + +# Shall be removed in Fedora 31 +# The release is bumped to 20, so we can do f27 platform-python updates +# If the release in f27 ever goes >= 20, raise it here +# If platform-python is ever reintroduced, make it higher version than this: +%global platpyver 3.6.2-20 +Obsoletes: platform-python < %{platpyver} + +%if %{with rewheel} +Requires: python3-setuptools +Requires: python3-pip +%endif + +# The description used both for the SRPM and the main `python3` subpackage: +%description +Python is an accessible, high-level, dynamically typed, interpreted programming +language, designed with an emphasis on code readibility. +It includes an extensive standard library, and has a vast ecosystem of +third-party libraries. + +The ${name} package provides the "python3" executable: the reference +interpreter for the Python language, version 3. +The majority of its standard library is provided in the %{name}-libs package, +which should be installed automatically along with %{name}. +The remaining parts of the Python standard library are broken out into the +%{name}-tkinter and %{name}-test packages, which may need to be installed +separately. + +Documentation for Python is provided in the %{name}-docs package. + +Packages containing additional libraries for Python are generally named with +the "%{name}-" prefix. + + +%package libs +Summary: Python runtime libraries + +# The "enum" module is included in the standard library. +# Provide an upgrade path from the external library. 
+Provides: python3-enum34 = 1.0.4-5%{?dist} +Obsoletes: python3-enum34 < 1.0.4-5%{?dist} + +# Python 3 built with glibc >= 2.24.90-26 needs to require it +# See https://bugzilla.redhat.com/show_bug.cgi?id=1410644 +Requires: glibc%{?_isa} >= 2.24.90-26 + +# For backward compatibility only, remove in F29: +Provides: system-python-libs = %{version}-%{release} +Provides: system-python-libs%{?_isa} = %{version}-%{release} +Obsoletes: system-python-libs < %{version}-%{release} + +# Shall be removed in Fedora 31 +Obsoletes: platform-python-libs < %{platpyver} +Obsoletes: platform-python-libs-devel < %{platpyver} + +%description libs +This package contains runtime libraries for use by Python: +- the majority of the Python standard library +- a dynamically linked library for use by applications that embed Python as + a scripting language, and by the main "python3" executable + + +%package devel +Summary: Libraries and header files needed for Python development +Requires: %{name} = %{version}-%{release} +Requires: %{name}-libs%{?_isa} = %{version}-%{release} +BuildRequires: python-rpm-macros +Requires: python-rpm-macros +Requires: python3-rpm-macros +Requires: python3-rpm-generators + +# https://bugzilla.redhat.com/show_bug.cgi?id=1217376 +# https://bugzilla.redhat.com/show_bug.cgi?id=1496757 +# https://bugzilla.redhat.com/show_bug.cgi?id=1218294 +# TODO change to a specific subpackage once available (#1218294) +Requires: redhat-rpm-config + +Conflicts: %{name} < %{version}-%{release} + +# Shall be removed in Fedora 31 +Obsoletes: platform-python-devel < %{platpyver} + +%description devel +This package contains the header files and configuration needed to compile +Python extension modules (typically written in C or C++), to embed Python +into other programs, and to make binary distributions for Python libraries. + +It also contains the necessary macros to build RPM packages with Python modules. + + +%package tools +Summary: A collection of tools included with Python including 2to3 and idle +Requires: %{name} = %{version}-%{release} +Requires: %{name}-tkinter = %{version}-%{release} + +Provides: %{name}-2to3 = %{version}-%{release} +Provides: %{name}-idle = %{version}-%{release} +Provides: 2to3 = %{version}-%{release} +Provides: idle3 = %{version}-%{release} + +# https://bugzilla.redhat.com/show_bug.cgi?id=1111275 +# /usr/bin/2to3 was moved from here +# TODO Remove in Fedora 29 +Conflicts: python2-tools < 2.7.13-17 +Conflicts: python-tools < 2.7.13-17 + +# Shall be removed in Fedora 31 +Obsoletes: platform-python-tools < %{platpyver} + +%description tools +This package contains several tools included with Python, including: +- 2to3, an automatic source converter from Python 2.X +- idle, a basic graphical development environment + + +%package tkinter +Summary: A GUI toolkit for Python +Requires: %{name} = %{version}-%{release} + +# Shall be removed in Fedora 31 +Obsoletes: platform-python-tkinter < %{platpyver} + +%description tkinter +The Tkinter (Tk interface) library is a graphical user interface toolkit for +the Python programming language. + + +%package test +Summary: The self-test suite for the main python3 package +Requires: %{name} = %{version}-%{release} +Requires: %{name}-tools = %{version}-%{release} + +# Shall be removed in Fedora 31 +Obsoletes: platform-python-test < %{platpyver} + +%description test +The self-test suite for the Python interpreter. + +This is only useful to test Python itself. 
For testing general Python code, +you should use the unittest module from %{name}-libs, or a library such as +${name}-pytest or ${name}-nose. + + +%if %{with debug_build} +%package debug +Summary: Debug version of the Python runtime + +# The debug build is an all-in-one package version of the regular build, and +# shares the same .py/.pyc files and directories as the regular build. Hence +# we depend on all of the subpackages of the regular build: +Requires: %{name}%{?_isa} = %{version}-%{release} +Requires: %{name}-libs%{?_isa} = %{version}-%{release} +Requires: %{name}-devel%{?_isa} = %{version}-%{release} +Requires: %{name}-test%{?_isa} = %{version}-%{release} +Requires: %{name}-tkinter%{?_isa} = %{version}-%{release} +Requires: %{name}-tools%{?_isa} = %{version}-%{release} + +%description debug +python3-debug provides a version of the Python runtime with numerous debugging +features enabled, aimed at advanced Python users such as developers of Python +extension modules. + +This version uses more memory and will be slower than the regular Python build, +but is useful for tracking down reference-counting issues and other bugs. + +The bytecode format is unchanged, so that .pyc files are compatible between +this and the standard version of Python, but the debugging features mean that +C/C++ extension modules are ABI-incompatible and must be built for each version +separately. + +The debug build shares installation directories with the standard Python +runtime, so that .py and .pyc files can be shared. +Compiled extension modules use a special ABI flag ("d") in the filename, +so extensions for both verisons can co-exist in the same directory. +%endif # with debug_build + +%else # with flatpackage + +Requires: redhat-rpm-config + +# We'll not provide this, on purpose +# No package in Fedora shall ever depend on flatpackage via this +%global __requires_exclude ^python\\(abi\\) = 3\\..$ +%global __provides_exclude ^python\\(abi\\) = 3\\..$ + +# We keep those inside on purpose +Provides: bundled(python3-pip) = 9.0.1 +Provides: bundled(python3-setuptools) = 28.8.0 + +# The descripton for the flat flatpackage package +%description +This package exists to allow developers to test their code against an newer +version of Python. This is not a full Python stack and if you wish to run +your applications with Python %{pybasever}, update your Fedora to a newer +version once Python %{pybasever} is stable. + +%endif # with flatpackage + +# ====================================================== +# The prep phase of the build: +# ====================================================== + +%prep +%setup -q -n Python-%{version}%{?prerel} + +%if %{with systemtap} +# Provide an example of usage of the tapset: +cp -a %{SOURCE6} . +cp -a %{SOURCE7} . +%endif # with systemtap + +# Remove bundled libraries to ensure that we're using the system copy. 
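+# (The system copy is then linked in via --with-system-expat in the
+# %%configure invocation below.  Illustration only, not run by the build: the
+# result can be double-checked after install with something like
+#   ldd %{buildroot}/%{dynload_dir}/pyexpat*.so | grep expat
+# which should report the system libexpat.)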
+rm -r Modules/expat + +%if %{with rewheel} +%global pip_version 9.0.1 +sed -r -i s/'_PIP_VERSION = "[0-9.]+"'/'_PIP_VERSION = "%{pip_version}"'/ Lib/ensurepip/__init__.py +%endif + +# +# Apply patches: +# +%patch1 -p1 + +%if %{with systemtap} +%patch55 -p1 -b .systemtap +%endif + +%if "%{_lib}" == "lib64" +%patch102 -p1 +%endif +%patch111 -p1 +%patch132 -p1 +%patch155 -p1 +%patch160 -p1 +%patch163 -p1 +%patch170 -p1 +%patch178 -p1 + +%if %{with rewheel} +%patch189 -p1 +%endif + +%patch205 -p1 +%patch251 -p1 + +%ifarch aarch64 +%patch264 -p1 +%endif + +%patch273 -p1 +%patch274 -p1 + + +# Remove files that should be generated by the build +# (This is after patching, so that we can use patches directly from upstream) +rm configure pyconfig.h.in + + +# ====================================================== +# Configuring and building the code: +# ====================================================== + +%build + +# Regenerate the configure script and pyconfig.h.in +autoconf +autoheader + +# Remember the current directory (which has sources and the configure script), +# so we can refer to it after we "cd" elsewhere. +topdir=$(pwd) + +# Get proper option names from bconds +%if %{with computed_gotos} +%global computed_gotos_flag yes +%else +%global computed_gotos_flag no +%endif + +%if %{with optimizations} +%global optimizations_flag "--enable-optimizations" +%else +%global optimizations_flag "--disable-optimizations" +%endif + +# Set common compiler/linker flags +export CFLAGS="$RPM_OPT_FLAGS -D_GNU_SOURCE -fPIC -fwrapv" +export CXXFLAGS="$RPM_OPT_FLAGS -D_GNU_SOURCE -fPIC -fwrapv" +export CPPFLAGS="$(pkg-config --cflags-only-I libffi)" +export OPT="$RPM_OPT_FLAGS -D_GNU_SOURCE -fPIC -fwrapv" +export LINKCC="gcc" +export CFLAGS="$CFLAGS $(pkg-config --cflags openssl)" +export LDFLAGS="$RPM_LD_FLAGS -g $(pkg-config --libs-only-L openssl)" + +# We can build several different configurations of Python: regular and debug. +# Define a common function that does one build: +BuildPython() { + ConfName=$1 + ExtraConfigArgs=$2 + MoreCFlags=$3 + + # Each build is done in its own directory + ConfDir=build/$ConfName + echo STARTING: BUILD OF PYTHON FOR CONFIGURATION: $ConfName + mkdir -p $ConfDir + pushd $ConfDir + + # Normally, %%configure looks for the "configure" script in the current + # directory. + # Since we changed directories, we need to tell %%configure where to look. + %global _configure $topdir/configure + +%configure \ + --enable-ipv6 \ + --enable-shared \ + --with-computed-gotos=%{computed_gotos_flag} \ + --with-dbmliborder=gdbm:ndbm:bdb \ + --with-system-expat \ + --with-system-ffi \ + --enable-loadable-sqlite-extensions \ + --with-dtrace \ + --with-lto \ +%if %{with systemtap} + --with-systemtap \ +%endif +%if %{with valgrind} + --with-valgrind \ +%endif + $ExtraConfigArgs \ + %{nil} + + # Invoke the build + make EXTRA_CFLAGS="$CFLAGS $MoreCFlags" %{?_smp_mflags} + + popd + echo FINISHED: BUILD OF PYTHON FOR CONFIGURATION: $ConfName +} + +# Call the above to build each configuration. 
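+# (For reference, BuildPython is invoked as:
+#   BuildPython <ConfName> <ExtraConfigArgs> <MoreCFlags>
+# matching the three positional parameters read at the top of the function above.)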
+ +%if %{with debug_build} +BuildPython debug \ + "--without-ensurepip --with-pydebug" \ + "-O0" +%endif # with debug_build + +BuildPython optimized \ + "--without-ensurepip %{optimizations_flag}" \ + "" + +# ====================================================== +# Installing the built code: +# ====================================================== + +%install + +# As in %%build, remember the current directory +topdir=$(pwd) + +# We install a collection of hooks for gdb that make it easier to debug +# executables linked against libpython3* (such as /usr/bin/python3 itself) +# +# These hooks are implemented in Python itself (though they are for the version +# of python that gdb is linked with) +# +# gdb-archer looks for them in the same path as the ELF file or its .debug +# file, with a -gdb.py suffix. +# We put them next to the debug file, because ldconfig would complain if +# it found non-library files directly in /usr/lib/ +# (see https://bugzilla.redhat.com/show_bug.cgi?id=562980) +# +# We'll put these files in the debuginfo package by installing them to e.g.: +# /usr/lib/debug/usr/lib/libpython3.2.so.1.0.debug-gdb.py +# (note that the debug path is /usr/lib/debug for both 32/64 bit) +# +# See https://fedoraproject.org/wiki/Features/EasierPythonDebugging for more +# information + +%if %{with gdb_hooks} +DirHoldingGdbPy=%{_prefix}/lib/debug/%{_libdir} +mkdir -p %{buildroot}$DirHoldingGdbPy +%endif # with gdb_hooks + +# Multilib support for pyconfig.h +# 32- and 64-bit versions of pyconfig.h are different. For multilib support +# (making it possible to install 32- and 64-bit versions simultaneously), +# we need to install them under different filenames, and to make the common +# "pyconfig.h" include the right file based on architecture. +# See https://bugzilla.redhat.com/show_bug.cgi?id=192747 +# Filanames are defined here: +%global _pyconfig32_h pyconfig-32.h +%global _pyconfig64_h pyconfig-64.h +%global _pyconfig_h pyconfig-%{wordsize}.h + +# Use a common function to do an install for all our configurations: +InstallPython() { + + ConfName=$1 + PyInstSoName=$2 + MoreCFlags=$3 + LDVersion=$4 + Postfix=$5 + + # Switch to the directory with this configuration's built files + ConfDir=build/$ConfName + echo STARTING: INSTALL OF PYTHON FOR CONFIGURATION: $ConfName + mkdir -p $ConfDir + pushd $ConfDir + + make \ + DESTDIR=%{buildroot} \ + INSTALL="install -p" \ + EXTRA_CFLAGS="$MoreCFlags" \ + install + + popd + +%if %{with gdb_hooks} + # See comment on $DirHoldingGdbPy above + PathOfGdbPy=$DirHoldingGdbPy/$PyInstSoName-%{version}-%{release}.%{_arch}.debug-gdb.py + cp Tools/gdb/libpython.py %{buildroot}$PathOfGdbPy +%endif # with gdb_hooks + + # Rename the -devel script that differs on different arches to arch specific name + mv %{buildroot}%{_bindir}/python${LDVersion}-{,`uname -m`-}config + echo -e '#!/bin/sh\nexec `dirname $0`/python'${LDVersion}'-`uname -m`-config "$@"' > \ + %{buildroot}%{_bindir}/python${LDVersion}-config + echo '[ $? -eq 127 ] && echo "Could not find python'${LDVersion}'-`uname -m`-config. Look around to see available arches." 
>&2' >> \ + %{buildroot}%{_bindir}/python${LDVersion}-config + chmod +x %{buildroot}%{_bindir}/python${LDVersion}-config + + # Make python3-devel multilib-ready + mv %{buildroot}%{_includedir}/python${LDVersion}/pyconfig.h \ + %{buildroot}%{_includedir}/python${LDVersion}/%{_pyconfig_h} + cat > %{buildroot}%{_includedir}/python${LDVersion}/pyconfig.h << EOF +#include + +#if __WORDSIZE == 32 +#include "%{_pyconfig32_h}" +#elif __WORDSIZE == 64 +#include "%{_pyconfig64_h}" +#else +#error "Unknown word size" +#endif +EOF + + # Systemtap hooks +%if %{with systemtap} + mkdir -p %{buildroot}%{tapsetdir} + sed \ + -e "s|LIBRARY_PATH|%{_libdir}/${PyInstSoName}|" \ + -e 's|"python3"|"python3${Postfix}"|' \ + %{_sourcedir}/libpython.stp \ + > %{buildroot}%{tapsetdir}/libpython%{pybasever}${Postfix}-%{wordsize}.stp +%endif # with systemtap + + echo FINISHED: INSTALL OF PYTHON FOR CONFIGURATION: $ConfName +} + +# Install the "debug" build first; any common files will be overridden with +# later builds +%if %{with debug_build} +InstallPython debug \ + %{py_INSTSONAME_debug} \ + -O0 \ + %{LDVERSION_debug} \ + -debug +%endif # with debug_build + +# Now the optimized build: +InstallPython optimized \ + %{py_INSTSONAME_optimized} \ + "" \ + %{LDVERSION_optimized} \ + "" + +# Install directories for additional packages +install -d -m 0755 %{buildroot}%{pylibdir}/site-packages/__pycache__ +%if "%{_lib}" == "lib64" +# The 64-bit version needs to create "site-packages" in /usr/lib/ (for +# pure-Python modules) as well as in /usr/lib64/ (for packages with extension +# modules). +# Note that rpmlint will complain about hardcoded library path; +# this is intentional. +install -d -m 0755 %{buildroot}%{_prefix}/lib/python%{pybasever}/site-packages/__pycache__ +%endif + +%if %{without flatpackage} +# add idle3 to menu +install -D -m 0644 Lib/idlelib/Icons/idle_16.png %{buildroot}%{_datadir}/icons/hicolor/16x16/apps/idle3.png +install -D -m 0644 Lib/idlelib/Icons/idle_32.png %{buildroot}%{_datadir}/icons/hicolor/32x32/apps/idle3.png +install -D -m 0644 Lib/idlelib/Icons/idle_48.png %{buildroot}%{_datadir}/icons/hicolor/48x48/apps/idle3.png +desktop-file-install --dir=%{buildroot}%{_datadir}/applications %{SOURCE10} + +# Install and validate appdata file +mkdir -p %{buildroot}%{_datadir}/appdata +cp -a %{SOURCE11} %{buildroot}%{_datadir}/appdata +appstream-util validate-relax --nonet %{buildroot}%{_datadir}/appdata/idle3.appdata.xml +%endif + +# Development tools +install -m755 -d %{buildroot}%{pylibdir}/Tools +install Tools/README %{buildroot}%{pylibdir}/Tools/ +cp -ar Tools/freeze %{buildroot}%{pylibdir}/Tools/ +cp -ar Tools/i18n %{buildroot}%{pylibdir}/Tools/ +cp -ar Tools/pynche %{buildroot}%{pylibdir}/Tools/ +cp -ar Tools/scripts %{buildroot}%{pylibdir}/Tools/ + +# Documentation tools +install -m755 -d %{buildroot}%{pylibdir}/Doc +cp -ar Doc/tools %{buildroot}%{pylibdir}/Doc/ + +# Demo scripts +cp -ar Tools/demo %{buildroot}%{pylibdir}/Tools/ + +# Make sure distutils looks at the right pyconfig.h file +# See https://bugzilla.redhat.com/show_bug.cgi?id=201434 +# Similar for sysconfig: sysconfig.get_config_h_filename tries to locate +# pyconfig.h so it can be parsed, and needs to do this at runtime in site.py +# when python starts up (see https://bugzilla.redhat.com/show_bug.cgi?id=653058) +# +# Split this out so it goes directly to the pyconfig-32.h/pyconfig-64.h +# variants: +sed -i -e "s/'pyconfig.h'/'%{_pyconfig_h}'/" \ + %{buildroot}%{pylibdir}/distutils/sysconfig.py \ + %{buildroot}%{pylibdir}/sysconfig.py + 
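+# (Illustration only, not executed by the build: after installation the
+# redirect can be verified with e.g.
+#   python3 -c 'import sysconfig; print(sysconfig.get_config_h_filename())'
+# which should print the arch-specific pyconfig-32.h/pyconfig-64.h path.)
+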
+# Switch all shebangs to refer to the specific Python version. +# This currently only covers files matching ^[a-zA-Z0-9_]+\.py$, +# so handle files named using other naming scheme separately. +LD_LIBRARY_PATH=./build/optimized ./build/optimized/python \ + Tools/scripts/pathfix.py \ + -i "%{_bindir}/python%{pybasever}" \ + %{buildroot} %{buildroot}%{pylibdir}/Tools/scripts/*-*.py \ + %{buildroot}%{pylibdir}/Tools/pynche/{pynche,pynche.pyw} +# not covered, also redundant and useless: +rm %{buildroot}%{pylibdir}/Tools/scripts/{2to3,idle3,pydoc3,pyvenv} + + +# Move pathfix.py to bindir +# See https://github.com/fedora-python/python-rpm-porting/issues/24 +mv %{buildroot}%{pylibdir}/Tools/scripts/pathfix.py %{buildroot}%{_bindir}/ + + +# Remove shebang lines from .py files that aren't executable, and +# remove executability from .py files that don't have a shebang line: +find %{buildroot} -name \*.py \ + \( \( \! -perm /u+x,g+x,o+x -exec sed -e '/^#!/Q 0' -e 'Q 1' {} \; \ + -print -exec sed -i '1d' {} \; \) -o \( \ + -perm /u+x,g+x,o+x ! -exec grep -m 1 -q '^#!' {} \; \ + -exec chmod a-x {} \; \) \) + +# Remove executable flag from files that shouldn't have it: +chmod a-x \ + %{buildroot}%{pylibdir}/Tools/README + +# Get rid of DOS batch files: +find %{buildroot} -name \*.bat -exec rm {} \; + +# Get rid of backup files: +find %{buildroot}/ -name "*~" -exec rm -f {} \; +find . -name "*~" -exec rm -f {} \; + +# Get rid of a stray copy of the license: +rm %{buildroot}%{pylibdir}/LICENSE.txt + +# Do bytecompilation with the newly installed interpreter. +# This is similar to the script in macros.pybytecompile +# compile *.pyc +find %{buildroot} -type f -a -name "*.py" -print0 | \ + LD_LIBRARY_PATH="%{buildroot}%{dynload_dir}/:%{buildroot}%{_libdir}" \ + PYTHONPATH="%{buildroot}%{_libdir}/python%{pybasever} %{buildroot}%{_libdir}/python%{pybasever}/site-packages" \ + xargs -0 %{buildroot}%{_bindir}/python%{pybasever} -O -c 'import py_compile, sys; [py_compile.compile(f, dfile=f.partition("%{buildroot}")[2], optimize=opt) for opt in range(3) for f in sys.argv[1:]]' || : + +# Since we have pathfix.py in bindir, this is created, but we don't want it +rm -rf %{buildroot}%{_bindir}/__pycache__ + +# Fixup permissions for shared libraries from non-standard 555 to standard 755: +find %{buildroot} -perm 555 -exec chmod 755 {} \; + +# Install macros for rpm: +mkdir -p %{buildroot}/%{_rpmconfigdir}/macros.d/ +install -m 644 %{SOURCE3} %{buildroot}/%{_rpmconfigdir}/macros.d/ +%if %{without flatpackage} +install -m 644 %{SOURCE9} %{buildroot}/%{_rpmconfigdir}/macros.d/ +%endif + +# Create "/usr/bin/python3-debug", a symlink to the python3 debug binary, to +# avoid the user having to know the precise version and ABI flags. +# See e.g. 
https://bugzilla.redhat.com/show_bug.cgi?id=676748 +%if %{with debug_build} && %{without flatpackage} +ln -s \ + %{_bindir}/python%{LDVERSION_debug} \ + %{buildroot}%{_bindir}/python3-debug +%endif + +# System Python: Link the executable to libexec +# This is for backwards compatibility only and should be removed in Fedora 29 +%if %{without flatpackage} +mkdir -p %{buildroot}%{_libexecdir} +ln -s %{_bindir}/python%{pybasever} %{buildroot}%{_libexecdir}/system-python +%endif + + +# ====================================================== +# Checks for packaging issues +# ====================================================== + +%check + +# first of all, check timestamps of bytecode files +find %{buildroot} -type f -a -name "*.py" -print0 | \ + LD_LIBRARY_PATH="%{buildroot}%{dynload_dir}/:%{buildroot}%{_libdir}" \ + PYTHONPATH="%{buildroot}%{_libdir}/python%{pybasever} %{buildroot}%{_libdir}/python%{pybasever}/site-packages" \ + xargs -0 %{buildroot}%{_bindir}/python%{pybasever} %{SOURCE8} + +# Ensure that the curses module was linked against libncursesw.so, rather than +# libncurses.so +# See https://bugzilla.redhat.com/show_bug.cgi?id=539917 +ldd %{buildroot}/%{dynload_dir}/_curses*.so \ + | grep curses \ + | grep libncurses.so && (echo "_curses.so linked against libncurses.so" ; exit 1) + +# Ensure that the debug modules are linked against the debug libpython, and +# likewise for the optimized modules and libpython: +for Module in %{buildroot}/%{dynload_dir}/*.so ; do + case $Module in + *.%{SOABI_debug}) + ldd $Module | grep %{py_INSTSONAME_optimized} && + (echo Debug module $Module linked against optimized %{py_INSTSONAME_optimized} ; exit 1) + + ;; + *.%{SOABI_optimized}) + ldd $Module | grep %{py_INSTSONAME_debug} && + (echo Optimized module $Module linked against debug %{py_INSTSONAME_debug} ; exit 1) + ;; + esac +done + +# There's no point of having this, as decided in +# https://bugzilla.redhat.com/show_bug.cgi?id=1111275 +rm %{buildroot}%{_bindir}/2to3-%{pybasever} + +%if %{with flatpackage} +# Remove stuff that would conflict with python3 package +mv %{buildroot}%{_bindir}/python{3,%{pyshortver}} +rm %{buildroot}%{_bindir}/pydoc3 +rm %{buildroot}%{_bindir}/pathfix.py +rm %{buildroot}%{_bindir}/idle3 +rm %{buildroot}%{_bindir}/python3-* +rm %{buildroot}%{_bindir}/pyvenv +rm %{buildroot}%{_bindir}/2to3 +rm %{buildroot}%{_libdir}/libpython3.so +rm %{buildroot}%{_mandir}/man1/python3.1* +rm %{buildroot}%{_libdir}/pkgconfig/python3.pc +%endif + + +# ====================================================== +# Running the upstream test suite +# ====================================================== + +# For ppc64 we need a larger stack than default +# See https://bugzilla.redhat.com/show_bug.cgi?id=1292462 +%ifarch %{power64} + ulimit -a + ulimit -s 16384 +%endif + + +topdir=$(pwd) +CheckPython() { + ConfName=$1 + ConfDir=$(pwd)/build/$ConfName + + echo STARTING: CHECKING OF PYTHON FOR CONFIGURATION: $ConfName + + # Note that we're running the tests using the version of the code in the + # builddir, not in the buildroot. 
+ + # Run the upstream test suite, setting "WITHIN_PYTHON_RPM_BUILD" so that the + # our non-standard decorators take effect on the relevant tests: + # @unittest._skipInRpmBuild(reason) + # @unittest._expectedFailureInRpmBuild + # test_faulthandler.test_register_chain currently fails on ppc64le and + # aarch64, see upstream bug http://bugs.python.org/issue21131 + WITHIN_PYTHON_RPM_BUILD= \ + LD_LIBRARY_PATH=$ConfDir $ConfDir/python -m test.regrtest \ + -wW --slowest --findleaks \ + -x test_distutils \ + -x test_bdist_rpm \ + %ifarch ppc64le aarch64 + -x test_faulthandler \ + %endif + %ifarch %{mips64} + -x test_ctypes \ + %endif + %ifarch %{power64} s390 s390x armv7hl aarch64 %{mips} + -x test_gdb + %endif + + echo FINISHED: CHECKING OF PYTHON FOR CONFIGURATION: $ConfName + +} + +%if %{with tests} + +# Check each of the configurations: +%if %{with debug_build} +CheckPython debug +%endif # with debug_build +CheckPython optimized + +%endif # with tests + + +# ====================================================== +# Scriptlets +# ====================================================== + +%if %{without flatpackage} + +%post libs -p /sbin/ldconfig + +%postun libs -p /sbin/ldconfig + +%posttrans +/usr/bin/gtk-update-icon-cache %{_datadir}/icons/hicolor &>/dev/null || : + +%endif + + +%post +%if %{with flatpackage} +/sbin/ldconfig +%else +/bin/touch --no-create %{_datadir}/icons/hicolor &>/dev/null || : +%endif + +%postun +%if %{with flatpackage} +/sbin/ldconfig +%else +if [ $1 -eq 0 ] ; then + /bin/touch --no-create %{_datadir}/icons/hicolor &>/dev/null + /usr/bin/gtk-update-icon-cache %{_datadir}/icons/hicolor &>/dev/null || : +fi +%endif + +%files +%defattr(-, root, root) +%license LICENSE +%doc README.rst + +%if %{without flatpackage} +%{_bindir}/pydoc* +%{_bindir}/python3 +%{_bindir}/pyvenv +# Remove in Fedora 29: +%{_libexecdir}/system-python +%else +%{_bindir}/pydoc%{pybasever} +%{_bindir}/python%{pyshortver} +%endif + +%{_bindir}/python%{pybasever} +%{_bindir}/python%{pybasever}m +%{_bindir}/pyvenv-%{pybasever} +%{_mandir}/*/* + + +%if %{without flatpackage} +%files libs +%defattr(-,root,root,-) +%license LICENSE +%doc README.rst +%endif + +%dir %{pylibdir} +%dir %{dynload_dir} + +%{pylibdir}/lib2to3 +%if %{without flatpackage} +%exclude %{pylibdir}/lib2to3/tests +%endif + +%dir %{pylibdir}/unittest/ +%dir %{pylibdir}/unittest/__pycache__/ +%{pylibdir}/unittest/*.py +%{pylibdir}/unittest/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/asyncio/ +%dir %{pylibdir}/asyncio/__pycache__/ +%{pylibdir}/asyncio/*.py +%{pylibdir}/asyncio/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/venv/ +%dir %{pylibdir}/venv/__pycache__/ +%{pylibdir}/venv/*.py +%{pylibdir}/venv/__pycache__/*%{bytecode_suffixes} +%{pylibdir}/venv/scripts + +%{pylibdir}/wsgiref +%{pylibdir}/xmlrpc + +%dir %{pylibdir}/ensurepip/ +%dir %{pylibdir}/ensurepip/__pycache__/ +%{pylibdir}/ensurepip/*.py +%{pylibdir}/ensurepip/__pycache__/*%{bytecode_suffixes} + +%if %{without flatpackage} +%exclude %{pylibdir}/ensurepip/_bundled +%else +%dir %{pylibdir}/ensurepip/_bundled +%{pylibdir}/ensurepip/_bundled/*.whl +%endif + +%if %{with rewheel} +%dir %{pylibdir}/ensurepip/rewheel/ +%dir %{pylibdir}/ensurepip/rewheel/__pycache__/ +%{pylibdir}/ensurepip/rewheel/*.py +%{pylibdir}/ensurepip/rewheel/__pycache__/*%{bytecode_suffixes} +%endif + +%{pylibdir}/idlelib + +%dir %{pylibdir}/test/ +%dir %{pylibdir}/test/__pycache__/ +%dir %{pylibdir}/test/support/ +%dir %{pylibdir}/test/support/__pycache__/ +%{pylibdir}/test/__init__.py 
+%{pylibdir}/test/__pycache__/__init__%{bytecode_suffixes} +%{pylibdir}/test/support/__init__.py +%{pylibdir}/test/support/__pycache__/__init__%{bytecode_suffixes} + +%dir %{pylibdir}/concurrent/ +%dir %{pylibdir}/concurrent/__pycache__/ +%{pylibdir}/concurrent/*.py +%{pylibdir}/concurrent/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/concurrent/futures/ +%dir %{pylibdir}/concurrent/futures/__pycache__/ +%{pylibdir}/concurrent/futures/*.py +%{pylibdir}/concurrent/futures/__pycache__/*%{bytecode_suffixes} + +%{pylibdir}/pydoc_data + +%{dynload_dir}/_blake2.%{SOABI_optimized}.so +%{dynload_dir}/_md5.%{SOABI_optimized}.so +%{dynload_dir}/_sha1.%{SOABI_optimized}.so +%{dynload_dir}/_sha256.%{SOABI_optimized}.so +%{dynload_dir}/_sha3.%{SOABI_optimized}.so +%{dynload_dir}/_sha512.%{SOABI_optimized}.so + +%{dynload_dir}/_asyncio.%{SOABI_optimized}.so +%{dynload_dir}/_bisect.%{SOABI_optimized}.so +%{dynload_dir}/_bz2.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_cn.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_hk.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_iso2022.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_jp.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_kr.%{SOABI_optimized}.so +%{dynload_dir}/_codecs_tw.%{SOABI_optimized}.so +%{dynload_dir}/_crypt.%{SOABI_optimized}.so +%{dynload_dir}/_csv.%{SOABI_optimized}.so +%{dynload_dir}/_ctypes.%{SOABI_optimized}.so +%{dynload_dir}/_curses.%{SOABI_optimized}.so +%{dynload_dir}/_curses_panel.%{SOABI_optimized}.so +%{dynload_dir}/_dbm.%{SOABI_optimized}.so +%{dynload_dir}/_decimal.%{SOABI_optimized}.so +%{dynload_dir}/_elementtree.%{SOABI_optimized}.so +%if %{with gdbm} +%{dynload_dir}/_gdbm.%{SOABI_optimized}.so +%endif +%{dynload_dir}/_hashlib.%{SOABI_optimized}.so +%{dynload_dir}/_heapq.%{SOABI_optimized}.so +%{dynload_dir}/_json.%{SOABI_optimized}.so +%{dynload_dir}/_lsprof.%{SOABI_optimized}.so +%{dynload_dir}/_lzma.%{SOABI_optimized}.so +%{dynload_dir}/_multibytecodec.%{SOABI_optimized}.so +%{dynload_dir}/_multiprocessing.%{SOABI_optimized}.so +%{dynload_dir}/_opcode.%{SOABI_optimized}.so +%{dynload_dir}/_pickle.%{SOABI_optimized}.so +%{dynload_dir}/_posixsubprocess.%{SOABI_optimized}.so +%{dynload_dir}/_random.%{SOABI_optimized}.so +%{dynload_dir}/_socket.%{SOABI_optimized}.so +%{dynload_dir}/_sqlite3.%{SOABI_optimized}.so +%{dynload_dir}/_ssl.%{SOABI_optimized}.so +%{dynload_dir}/_struct.%{SOABI_optimized}.so +%{dynload_dir}/array.%{SOABI_optimized}.so +%{dynload_dir}/audioop.%{SOABI_optimized}.so +%{dynload_dir}/binascii.%{SOABI_optimized}.so +%{dynload_dir}/cmath.%{SOABI_optimized}.so +%{dynload_dir}/_datetime.%{SOABI_optimized}.so +%{dynload_dir}/fcntl.%{SOABI_optimized}.so +%{dynload_dir}/grp.%{SOABI_optimized}.so +%{dynload_dir}/math.%{SOABI_optimized}.so +%{dynload_dir}/mmap.%{SOABI_optimized}.so +%{dynload_dir}/nis.%{SOABI_optimized}.so +%{dynload_dir}/ossaudiodev.%{SOABI_optimized}.so +%{dynload_dir}/parser.%{SOABI_optimized}.so +%{dynload_dir}/pyexpat.%{SOABI_optimized}.so +%{dynload_dir}/readline.%{SOABI_optimized}.so +%{dynload_dir}/resource.%{SOABI_optimized}.so +%{dynload_dir}/select.%{SOABI_optimized}.so +%{dynload_dir}/spwd.%{SOABI_optimized}.so +%{dynload_dir}/syslog.%{SOABI_optimized}.so +%{dynload_dir}/termios.%{SOABI_optimized}.so +#%{dynload_dir}/time.%{SOABI_optimized}.so +%{dynload_dir}/_testmultiphase.%{SOABI_optimized}.so +%{dynload_dir}/unicodedata.%{SOABI_optimized}.so +%{dynload_dir}/_uuid.%{SOABI_optimized}.so +%{dynload_dir}/xxlimited.%{SOABI_optimized}.so +%{dynload_dir}/zlib.%{SOABI_optimized}.so + +%dir 
%{pylibdir}/site-packages/ +%dir %{pylibdir}/site-packages/__pycache__/ +%{pylibdir}/site-packages/README.txt +%{pylibdir}/*.py +%dir %{pylibdir}/__pycache__/ +%{pylibdir}/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/collections/ +%dir %{pylibdir}/collections/__pycache__/ +%{pylibdir}/collections/*.py +%{pylibdir}/collections/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/ctypes/ +%dir %{pylibdir}/ctypes/__pycache__/ +%{pylibdir}/ctypes/*.py +%{pylibdir}/ctypes/__pycache__/*%{bytecode_suffixes} +%{pylibdir}/ctypes/macholib + +%{pylibdir}/curses + +%dir %{pylibdir}/dbm/ +%dir %{pylibdir}/dbm/__pycache__/ +%{pylibdir}/dbm/*.py +%{pylibdir}/dbm/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/distutils/ +%dir %{pylibdir}/distutils/__pycache__/ +%{pylibdir}/distutils/*.py +%{pylibdir}/distutils/__pycache__/*%{bytecode_suffixes} +%{pylibdir}/distutils/README +%{pylibdir}/distutils/command +%exclude %{pylibdir}/distutils/command/wininst-*.exe + +%dir %{pylibdir}/email/ +%dir %{pylibdir}/email/__pycache__/ +%{pylibdir}/email/*.py +%{pylibdir}/email/__pycache__/*%{bytecode_suffixes} +%{pylibdir}/email/mime +%doc %{pylibdir}/email/architecture.rst + +%{pylibdir}/encodings + +%{pylibdir}/html +%{pylibdir}/http + +%dir %{pylibdir}/importlib/ +%dir %{pylibdir}/importlib/__pycache__/ +%{pylibdir}/importlib/*.py +%{pylibdir}/importlib/__pycache__/*%{bytecode_suffixes} + +%dir %{pylibdir}/json/ +%dir %{pylibdir}/json/__pycache__/ +%{pylibdir}/json/*.py +%{pylibdir}/json/__pycache__/*%{bytecode_suffixes} + +%{pylibdir}/logging +%{pylibdir}/multiprocessing + +%dir %{pylibdir}/sqlite3/ +%dir %{pylibdir}/sqlite3/__pycache__/ +%{pylibdir}/sqlite3/*.py +%{pylibdir}/sqlite3/__pycache__/*%{bytecode_suffixes} + +%if %{without flatpackage} +%exclude %{pylibdir}/turtle.py +%exclude %{pylibdir}/__pycache__/turtle*%{bytecode_suffixes} +%endif + +%{pylibdir}/urllib +%{pylibdir}/xml + +%if "%{_lib}" == "lib64" +%attr(0755,root,root) %dir %{_prefix}/lib/python%{pybasever} +%attr(0755,root,root) %dir %{_prefix}/lib/python%{pybasever}/site-packages +%attr(0755,root,root) %dir %{_prefix}/lib/python%{pybasever}/site-packages/__pycache__/ +%endif + +# "Makefile" and the config-32/64.h file are needed by +# distutils/sysconfig.py:_init_posix(), so we include them in the core +# package, along with their parent directories (bug 531901): +%dir %{pylibdir}/config-%{LDVERSION_optimized}-%{_arch}-linux%{_gnu}/ +%{pylibdir}/config-%{LDVERSION_optimized}-%{_arch}-linux%{_gnu}/Makefile +%dir %{_includedir}/python%{LDVERSION_optimized}/ +%{_includedir}/python%{LDVERSION_optimized}/%{_pyconfig_h} + +%{_libdir}/%{py_INSTSONAME_optimized} +%if %{without flatpackage} +%{_libdir}/libpython3.so +%endif +%if %{with systemtap} +%dir %(dirname %{tapsetdir}) +%dir %{tapsetdir} +%{tapsetdir}/%{libpython_stp_optimized} +%doc systemtap-example.stp pyfuntop.stp +%endif + + +%if %{without flatpackage} +%files devel +%defattr(-,root,root) +%endif + +%{pylibdir}/config-%{LDVERSION_optimized}-%{_arch}-linux%{_gnu}/* +%if %{without flatpackage} +%exclude %{pylibdir}/config-%{LDVERSION_optimized}-%{_arch}-linux%{_gnu}/Makefile +%exclude %{_includedir}/python%{LDVERSION_optimized}/%{_pyconfig_h} +%endif +%{pylibdir}/distutils/command/wininst-*.exe +%{_includedir}/python%{LDVERSION_optimized}/*.h +%doc Misc/README.valgrind Misc/valgrind-python.supp Misc/gdbinit + +%if %{without flatpackage} +%{_bindir}/python3-config +%{_libdir}/pkgconfig/python3.pc +%{_rpmconfigdir}/macros.d/macros.systempython +%{_bindir}/pathfix.py +%endif + 
+%{_bindir}/python%{pybasever}-config +%{_bindir}/python%{LDVERSION_optimized}-config +%{_bindir}/python%{LDVERSION_optimized}-*-config +%{_libdir}/libpython%{LDVERSION_optimized}.so +%{_libdir}/pkgconfig/python-%{LDVERSION_optimized}.pc +%{_libdir}/pkgconfig/python-%{pybasever}.pc +%{_rpmconfigdir}/macros.d/macros.pybytecompile%{pybasever} + + +%if %{without flatpackage} +%files tools +%defattr(-,root,root,755) + +%{_bindir}/2to3 +%{_bindir}/idle* +%else +%{_bindir}/idle%{pybasever} +%endif + +%{pylibdir}/Tools +%doc %{pylibdir}/Doc +%if %{without flatpackage} +%{_datadir}/appdata/idle3.appdata.xml +%{_datadir}/applications/idle3.desktop +%{_datadir}/icons/hicolor/*/apps/idle3.* +%endif + +%if %{without flatpackage} +%files tkinter +%defattr(-,root,root,755) +%endif + +%{pylibdir}/tkinter +%if %{without flatpackage} +%exclude %{pylibdir}/tkinter/test +%endif +%{dynload_dir}/_tkinter.%{SOABI_optimized}.so +%{pylibdir}/turtle.py +%{pylibdir}/__pycache__/turtle*%{bytecode_suffixes} +%dir %{pylibdir}/turtledemo +%{pylibdir}/turtledemo/*.py +%{pylibdir}/turtledemo/*.cfg +%dir %{pylibdir}/turtledemo/__pycache__/ +%{pylibdir}/turtledemo/__pycache__/*%{bytecode_suffixes} + + +%if %{without flatpackage} +%files test +%defattr(-, root, root) +%endif + +%{pylibdir}/ctypes/test +%{pylibdir}/distutils/tests +%{pylibdir}/sqlite3/test +%{pylibdir}/test +%{dynload_dir}/_ctypes_test.%{SOABI_optimized}.so +%{dynload_dir}/_testbuffer.%{SOABI_optimized}.so +%{dynload_dir}/_testcapi.%{SOABI_optimized}.so +%{dynload_dir}/_testimportmultiple.%{SOABI_optimized}.so +%{dynload_dir}/_xxtestfuzz.%{SOABI_optimized}.so +%{pylibdir}/lib2to3/tests +%{pylibdir}/tkinter/test +%{pylibdir}/unittest/test + + +# We don't bother splitting the debug build out into further subpackages: +# if you need it, you're probably a developer. 
+ +# Hence the manifest is the combination of analogous files in the manifests of +# all of the other subpackages + +%if %{with debug_build} +%if %{without flatpackage} +%files debug +%defattr(-,root,root,-) +%{_bindir}/python3-debug +%endif + +# Analog of the core subpackage's files: +%{_bindir}/python%{LDVERSION_debug} + +# Analog of the -libs subpackage's files: +# ...with debug builds of the built-in "extension" modules: + +%{dynload_dir}/_blake2.%{SOABI_debug}.so +%{dynload_dir}/_md5.%{SOABI_debug}.so +%{dynload_dir}/_sha1.%{SOABI_debug}.so +%{dynload_dir}/_sha256.%{SOABI_debug}.so +%{dynload_dir}/_sha3.%{SOABI_debug}.so +%{dynload_dir}/_sha512.%{SOABI_debug}.so + +%{dynload_dir}/_asyncio.%{SOABI_debug}.so +%{dynload_dir}/_bisect.%{SOABI_debug}.so +%{dynload_dir}/_bz2.%{SOABI_debug}.so +%{dynload_dir}/_codecs_cn.%{SOABI_debug}.so +%{dynload_dir}/_codecs_hk.%{SOABI_debug}.so +%{dynload_dir}/_codecs_iso2022.%{SOABI_debug}.so +%{dynload_dir}/_codecs_jp.%{SOABI_debug}.so +%{dynload_dir}/_codecs_kr.%{SOABI_debug}.so +%{dynload_dir}/_codecs_tw.%{SOABI_debug}.so +%{dynload_dir}/_crypt.%{SOABI_debug}.so +%{dynload_dir}/_csv.%{SOABI_debug}.so +%{dynload_dir}/_ctypes.%{SOABI_debug}.so +%{dynload_dir}/_curses.%{SOABI_debug}.so +%{dynload_dir}/_curses_panel.%{SOABI_debug}.so +%{dynload_dir}/_dbm.%{SOABI_debug}.so +%{dynload_dir}/_decimal.%{SOABI_debug}.so +%{dynload_dir}/_elementtree.%{SOABI_debug}.so +%if %{with gdbm} +%{dynload_dir}/_gdbm.%{SOABI_debug}.so +%endif +%{dynload_dir}/_hashlib.%{SOABI_debug}.so +%{dynload_dir}/_heapq.%{SOABI_debug}.so +%{dynload_dir}/_json.%{SOABI_debug}.so +%{dynload_dir}/_lsprof.%{SOABI_debug}.so +%{dynload_dir}/_lzma.%{SOABI_debug}.so +%{dynload_dir}/_multibytecodec.%{SOABI_debug}.so +%{dynload_dir}/_multiprocessing.%{SOABI_debug}.so +%{dynload_dir}/_opcode.%{SOABI_debug}.so +%{dynload_dir}/_pickle.%{SOABI_debug}.so +%{dynload_dir}/_posixsubprocess.%{SOABI_debug}.so +%{dynload_dir}/_random.%{SOABI_debug}.so +%{dynload_dir}/_socket.%{SOABI_debug}.so +%{dynload_dir}/_sqlite3.%{SOABI_debug}.so +%{dynload_dir}/_ssl.%{SOABI_debug}.so +%{dynload_dir}/_struct.%{SOABI_debug}.so +%{dynload_dir}/array.%{SOABI_debug}.so +%{dynload_dir}/audioop.%{SOABI_debug}.so +%{dynload_dir}/binascii.%{SOABI_debug}.so +%{dynload_dir}/cmath.%{SOABI_debug}.so +%{dynload_dir}/_datetime.%{SOABI_debug}.so +%{dynload_dir}/fcntl.%{SOABI_debug}.so +%{dynload_dir}/grp.%{SOABI_debug}.so +%{dynload_dir}/math.%{SOABI_debug}.so +%{dynload_dir}/mmap.%{SOABI_debug}.so +%{dynload_dir}/nis.%{SOABI_debug}.so +%{dynload_dir}/ossaudiodev.%{SOABI_debug}.so +%{dynload_dir}/parser.%{SOABI_debug}.so +%{dynload_dir}/pyexpat.%{SOABI_debug}.so +%{dynload_dir}/readline.%{SOABI_debug}.so +%{dynload_dir}/resource.%{SOABI_debug}.so +%{dynload_dir}/select.%{SOABI_debug}.so +%{dynload_dir}/spwd.%{SOABI_debug}.so +%{dynload_dir}/syslog.%{SOABI_debug}.so +%{dynload_dir}/termios.%{SOABI_debug}.so +#%{dynload_dir}/time.%{SOABI_debug}.so +%{dynload_dir}/_testmultiphase.%{SOABI_debug}.so +%{dynload_dir}/unicodedata.%{SOABI_debug}.so +%{dynload_dir}/_uuid.%{SOABI_debug}.so +%{dynload_dir}/_xxtestfuzz.%{SOABI_debug}.so +%{dynload_dir}/zlib.%{SOABI_debug}.so + +# No need to split things out the "Makefile" and the config-32/64.h file as we +# do for the regular build above (bug 531901), since they're all in one package +# now; they're listed below, under "-devel": + +%{_libdir}/%{py_INSTSONAME_debug} +%if %{with systemtap} +%dir %(dirname %{tapsetdir}) +%dir %{tapsetdir} +%{tapsetdir}/%{libpython_stp_debug} +%endif + +# Analog of 
the -devel subpackage's files:
+%{pylibdir}/config-%{LDVERSION_debug}-%{_arch}-linux%{_gnu}
+%{_includedir}/python%{LDVERSION_debug}
+%{_bindir}/python%{LDVERSION_debug}-config
+%{_bindir}/python%{LDVERSION_debug}-*-config
+%{_libdir}/libpython%{LDVERSION_debug}.so
+%{_libdir}/libpython%{LDVERSION_debug}.so.1.0
+%{_libdir}/pkgconfig/python-%{LDVERSION_debug}.pc
+
+# Analog of the -tools subpackage's files:
+# None for now; we could build precanned versions that have the appropriate
+# shebang if needed
+
+# Analog of the tkinter subpackage's files:
+%{dynload_dir}/_tkinter.%{SOABI_debug}.so
+
+# Analog of the -test subpackage's files:
+%{dynload_dir}/_ctypes_test.%{SOABI_debug}.so
+%{dynload_dir}/_testbuffer.%{SOABI_debug}.so
+%{dynload_dir}/_testcapi.%{SOABI_debug}.so
+%{dynload_dir}/_testimportmultiple.%{SOABI_debug}.so
+
+%endif # with debug_build
+
+# We put the debug-gdb.py file inside /usr/lib/debug to avoid noise from ldconfig
+# See https://bugzilla.redhat.com/show_bug.cgi?id=562980
+#
+# The /usr/lib/rpm/redhat/macros defines %__debug_package to use
+# debugfiles.list, and it appears that everything below /usr/lib/debug and
+# (/usr/src/debug) gets added to this file (via LISTFILES) in
+# /usr/lib/rpm/find-debuginfo.sh
+#
+# Hence by installing it below /usr/lib/debug we ensure it is added to the
+# -debuginfo subpackage
+# (if it isn't, then the rpmbuild ought to fail, since the debug-gdb.py
+# payload file would be unpackaged)
+
+# Workaround for https://bugzilla.redhat.com/show_bug.cgi?id=1476593
+%undefine _debuginfo_subpackages
+
+# ======================================================
+# Finally, the changelog:
+# ======================================================
+
+%changelog
+* Tue Nov 28 2017 Miro Hrončok - 3.7.0-0.1.a2
+- Update to 3.7.0 alpha 2
+- Removed merged patches 262, 277, 279
+
+* Tue Nov 21 2017 Miro Hrončok - 3.6.3-4
+- Raise the release of platform-python obsoletes for better maintainability
+
+* Wed Nov 15 2017 Miro Hrončok - 3.6.3-3
+- Obsolete platform-python and its subpackages
+
+* Mon Oct 09 2017 Charalampos Stratakis - 3.6.3-2
+- Fix memory corruption due to allocator mix
+Resolves: rhbz#1498207
+
+* Fri Oct 06 2017 Charalampos Stratakis - 3.6.3-1
+- Update to Python 3.6.3
+
+* Fri Sep 29 2017 Miro Hrončok - 3.6.2-19
+- Move pathfix.py to bindir, https://github.com/fedora-python/python-rpm-porting/issues/24
+- Make the -devel package require redhat-rpm-config
+Resolves: rhbz#1496757
+
+* Wed Sep 13 2017 Iryna Shcherbina - 3.6.2-18
+- Fix /usr/bin/env dependency from python3-tools
+Resolves: rhbz#1482118
+
+* Wed Sep 06 2017 Iryna Shcherbina - 3.6.2-17
+- Include `-g` in the flags sent to the linker (LDFLAGS)
+Resolves: rhbz#1483222
+
+* Tue Sep 05 2017 Petr Viktorin - 3.6.2-16
+- Specfile cleanup
+- Make the main description also applicable to the SRPM
+- Add audiotest.au to the test package
+
+* Fri Sep 01 2017 Miro Hrončok - 3.6.2-15
+- Remove %%{pylibdir}/Tools/scripts/2to3
+
+* Fri Sep 01 2017 Miro Hrončok - 3.6.2-14
+- Expat >= 2.1.0 is everywhere, remove explicit requires
+- Conditionalize systemtap-devel BuildRequires
+- For consistency, require /usr/sbin/ifconfig instead of net-tools
+
+* Mon Aug 28 2017 Petr Viktorin - 3.6.2-13
+- Rename patch files to be consistent
+- Run autotools to generate the configure script before building
+- Merge lib64 patches (104 into 102)
+- Skip test_bdist_rpm using test config rather than a patch (removes patch 137)
+- Remove patches 157 and 186, which had test changes left over after upstreaming
+- Remove patch 188, a temporary workaround for hashlib tests
+- Merge patches 180, 206, 243, 5001 (architecture naming) into new patch 274
+- Move python2-tools conflicts to tools subpackage (it was wrongly in tkinter)
+
+* Mon Aug 28 2017 Michal Cyprian - 3.6.2-12
+- Use python3 style of calling super() without arguments in rpath
+  patch to prevent recursion in UnixCCompiler subclasses
+Resolves: rhbz#1458122
+
+* Mon Aug 21 2017 Petr Viktorin - 3.6.2-11
+- Add bcond for --without optimizations
+- Reword package descriptions
+- Remove Group declarations
+- Skip failing test_float_with_comma
+
+* Mon Aug 21 2017 Miro Hrončok - 3.6.2-10
+- Remove system-python, see https://fedoraproject.org/wiki/Changes/Platform_Python_Stack
+
+* Wed Aug 16 2017 Petr Viktorin - 3.6.2-9
+- Use bconds for configuring the build
+- Reorganize the initial sections
+
+* Wed Aug 16 2017 Miro Hrončok - 3.6.2-8
+- Have /usr/bin/2to3 (rhbz#1111275)
+- Provide 2to3 and idle3, list them in summary and description (rhbz#1076401)
+
+* Fri Aug 11 2017 Michal Cyprian - 3.6.2-7
+- Revert "Add --executable option to install.py command"
+  This enhancement is currently not needed and it can possibly
+  collide with the `pip --editable` option
+
+* Mon Aug 07 2017 Iryna Shcherbina - 3.6.2-6
+- Fix the "urllib FTP protocol stream injection" vulnerability
+Resolves: rhbz#1478916
+
+* Tue Aug 01 2017 Tomas Orsava - 3.6.2-5
+- Dropped BuildRequires on db4-devel which was useful for Python 2 (module
+  bsddb), however, no longer needed for Python 3
+- Tested building Python 3 with and without the dependency, all tests pass and
+  filelists of resulting RPMs are identical
+
+* Sun Jul 30 2017 Florian Weimer - 3.6.2-4
+- Do not generate debuginfo subpackages (#1476593)
+- Rebuild with binutils fix for ppc64le (#1475636)
+
+* Thu Jul 27 2017 Fedora Release Engineering - 3.6.2-3
+- Rebuilt for https://fedoraproject.org/wiki/Fedora_27_Mass_Rebuild
+
+* Tue Jul 25 2017 Charalampos Stratakis - 3.6.2-2
+- Make test_asyncio not depend on the current SIGHUP signal handler.
+ +* Tue Jul 18 2017 Charalampos Stratakis - 3.6.2-1 +- Update to Python 3.6.2 + +* Tue Jun 27 2017 Charalampos Stratakis - 3.6.1-10 +- Update to the latest upstream implementation of PEP 538 + +* Mon Jun 26 2017 Michal Cyprian - 3.6.1-9 +- Make pip and distutils in user environment install into separate location + +* Fri Jun 23 2017 Charalampos Stratakis - 3.6.1-8 +- Fix test_alpn_protocols from test_ssl +- Do not require rebundled setuptools dependencies + +* Tue May 16 2017 Tomas Orsava - 3.6.1-7 +- Added a dependency to the devel subpackage on python3-rpm-generators which + have been excised out of rpm-build +- Updated notes on bootstrapping Python on top of this specfile accordingly +- Involves: rhbz#1410631, rhbz#1444925 + +* Tue May 09 2017 Charalampos Stratakis - 3.6.1-6 +- Enable profile guided optimizations for x86_64 and i686 architectures +- Update to a newer implementation of PEP 538 +- Update description to reflect that Python 3 is now the default Python + +* Fri May 05 2017 Charalampos Stratakis - 3.6.1-5 +- Update PEP 538 to the latest upstream implementation + +* Tue Apr 18 2017 Charalampos Stratakis - 3.6.1-4 +- Enable link time optimizations +- Move windows executables to the devel subpackage (rhbz#1426257) + +* Thu Apr 13 2017 Tomas Orsava - 3.6.1-3 +- Rename python3.Xdm-config script from -debug to be arch specific +Resolves: rhbz#1179073 + +* Wed Apr 05 2017 Charalampos Stratakis - 3.6.1-2 +- Install the Makefile in its proper location (rhbz#1438219) + +* Wed Mar 22 2017 Iryna Shcherbina - 3.6.1-1 +- Update to version 3.6.1 final + +* Tue Mar 21 2017 Tomas Orsava - 3.6.1-0.2.rc1 +- Fix syntax error in %%py_byte_compile macro (rhbz#1433569) + +* Thu Mar 16 2017 Iryna Shcherbina - 3.6.1-0.1.rc1 +- Update to Python 3.6.1 release candidate 1 +- Add patch 264 to skip a known test failure on aarch64 + +* Fri Mar 10 2017 Charalampos Stratakis - 3.6.0-21 +- Use proper command line parsing in _testembed +- Backport of PEP 538: Coercing the legacy C locale to a UTF-8 based locale + https://fedoraproject.org/wiki/Changes/python3_c.utf-8_locale + +* Mon Feb 27 2017 Charalampos Stratakis - 3.6.0-20 +- Add desktop entry and appdata.xml file for IDLE 3 (rhbz#1392049) + +* Fri Feb 24 2017 Michal Cyprian - 3.6.0-19 +- Revert "Set values of prefix and exec_prefix to /usr/local for + /usr/bin/python* executables..." 
to prevent build failures + of packages using alternate build tools + +* Tue Feb 21 2017 Michal Cyprian - 3.6.0-18 +- Set values of prefix and exec_prefix to /usr/local for + /usr/bin/python* executables +- Use new %%_module_build macro + +* Fri Feb 17 2017 Michal Cyprian - 3.6.0-13 +- Add --executable option to install.py command + +* Wed Feb 15 2017 Charalampos Stratakis - 3.6.0-12 +- BuildRequire the new dependencies of setuptools when rewheel mode is enabled +in order for the virtualenvs to work properly + +* Sat Feb 11 2017 Fedora Release Engineering - 3.6.0-11 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_26_Mass_Rebuild + +* Wed Feb 01 2017 Stephen Gallagher - 3.6.0-10 +- Add missing %%license macro + +* Thu Jan 26 2017 Tomas Orsava - 3.6.0-9 +- Modify the runtime dependency of python3-libs on system-python-libs again, + because previous attempt didn't work properly with dnf resolving mechanism + +* Wed Jan 25 2017 Tomas Orsava - 3.6.0-8 +- Modify the runtime dependency of python3-libs on system-python-libs to use + just the version and release number, but not the dist tag due to Modularity + +* Mon Jan 16 2017 Charalampos Stratakis - 3.6.0-7 +- Fix error check, so that Random.seed actually uses OS randomness (rhbz#1412275) +- Skip test_aead_aes_gcm during rpmbuild + +* Thu Jan 12 2017 Igor Gnatenko - 3.6.0-6 +- Rebuild for readline 7.x + +* Tue Jan 10 2017 Charalampos Stratakis - 3.6.0-5 +- Require glibc >= 2.24.90-26 for system-python-libs (rhbz#1410644) + +* Mon Jan 09 2017 Charalampos Stratakis - 3.6.0-4 +- Define HAVE_LONG_LONG as 1 for backwards compatibility + +* Thu Jan 05 2017 Miro Hrončok - 3.6.0-3 +- Don't blow up on EL7 kernel (random generator) (rhbz#1410175) + +* Tue Dec 27 2016 Charalampos Stratakis - 3.6.0-1 +- Update to Python 3.6.0 final + +* Fri Dec 09 2016 Charalampos Stratakis - 3.6.0-0.6.rc1 +- Enable rewheel + +* Wed Dec 07 2016 Charalampos Stratakis - 3.6.0-0.5.rc1 +- Update to Python 3.6.0 release candidate 1 + +* Mon Dec 05 2016 Charalampos Stratakis - 3.6.0-0.4.b4 +- Update to Python 3.6.0 beta 4 + +* Mon Dec 05 2016 Charalampos Stratakis - 3.5.2-7 +- Set to work with pip version 9.0.1 + +* Wed Oct 12 2016 Charalampos Stratakis - 3.5.2-6 +- Use proper patch numbering and base upstream branch for +porting ssl and hashlib modules to OpenSSL 1.1.0 +- Drop hashlib patch for now +- Add riscv64 arch to 64bit and no-valgrind arches + +* Tue Oct 11 2016 Tomáš Mráz - 3.5.2-5 +- Make it build with OpenSSL-1.1.0 based on upstream patch + +* Wed Sep 14 2016 Charalampos Stratakis - 3.5.2-4 +- Obsolete and Provide python35 package + +* Mon Sep 12 2016 Charalampos Stratakis - 3.5.2-3 +- Update %py_byte_compile macro +- Remove unused configure flags (rhbz#1374357) + +* Fri Sep 09 2016 Tomas Orsava - 3.5.2-2 +- Updated .pyc 'bytecompilation with the newly installed interpreter' to also + recompile optimized .pyc files +- Removed .pyo 'bytecompilation with the newly installed interpreter', as .pyo + files are no more +- Resolves rhbz#1373635 + +* Mon Aug 15 2016 Tomas Orsava - 3.5.2-1 +- Rebased to version 3.5.2 +- Set to work with pip version 8.1.2 +- Removed patches 207, 237, 241 as fixes are already contained in Python 3.5.2 +- Removed arch or environment specific patches 194, 196, 203, and 208 + as test builds indicate they are no longer needed +- Updated patches 102, 146, and 242 to work with the new Python codebase +- Removed patches 200, 201, 5000 which weren't even being applied + +* Tue Aug 09 2016 Charalampos Stratakis - 3.5.1-15 +- Fix for CVE-2016-1000110 
HTTPoxy attack
+- SPEC file cleanup
+
+* Mon Aug 01 2016 Michal Toman - 3.5.1-14
+- Build properly on MIPS
+
+* Tue Jul 19 2016 Fedora Release Engineering - 3.5.1-13
+- https://fedoraproject.org/wiki/Changes/Automatic_Provides_for_Python_RPM_Packages
+
+* Fri Jul 08 2016 Charalampos Stratakis - 3.5.1-12
+- Refactor patch for properly fixing CVE-2016-5636
+
+* Fri Jul 08 2016 Charalampos Stratakis - 3.5.1-11
+- Fix test_pyexpat failure with Expat version of 2.2.0
+
+* Fri Jul 08 2016 Miro Hrončok - 3.5.1-10
+- Move xml module to system-python-libs
+
+* Thu Jun 16 2016 Tomas Orsava - 3.5.1-9
+- Fix for: CVE-2016-0772 python: smtplib StartTLS stripping attack
+- Raise an error when STARTTLS fails
+- rhbz#1303647: https://bugzilla.redhat.com/show_bug.cgi?id=1303647
+- rhbz#1346345: https://bugzilla.redhat.com/show_bug.cgi?id=1346345
+- Fixed upstream: https://hg.python.org/cpython/rev/d590114c2394
+
+* Mon Jun 13 2016 Charalampos Stratakis - 3.5.1-8
+- Added patch for fixing possible integer overflow and heap corruption in zipimporter.get_data()
+
+* Fri Mar 04 2016 Miro Hrončok - 3.5.1-7
+- Move distutils to system-python-libs
+
+* Wed Feb 24 2016 Robert Kuska - 3.5.1-6
+- Provide python3-enum34
+
+* Fri Feb 19 2016 Miro Hrončok - 3.5.1-5
+- Provide System Python packages and macros
+
+* Thu Feb 04 2016 Fedora Release Engineering - 3.5.1-4
+- Rebuilt for https://fedoraproject.org/wiki/Fedora_24_Mass_Rebuild
+
+* Wed Jan 13 2016 Orion Poplawski - 3.5.1-2
+- Drop python3 macros, require python/python3-rpm-macros
+
+* Mon Dec 14 2015 Robert Kuska - 3.5.1-1
+- Update to 3.5.1
+- Removed patches 199 and 207 (upstream)
+
+* Sun Nov 15 2015 Robert Kuska - 3.5.0-5
+- Remove versioned libpython from devel package
+
+* Fri Nov 13 2015 Than Ngo 3.5.0-4
+- add correct arch for ppc64/ppc64le to fix build failure
+
+* Wed Nov 11 2015 Robert Kuska - 3.5.0-3
+- Hide the private _Py_atomic_xxx symbols from public header
+
+* Wed Oct 14 2015 Robert Kuska - 3.5.0-2
+- Rebuild with wheel set to 1
+
+* Tue Sep 15 2015 Matej Stuchlik - 3.5.0-1
+- Update to 3.5.0
+
+* Mon Jun 29 2015 Thomas Spura - 3.4.3-4
+- python3-devel: Require python-macros for version independent macros such as
+  python_provide. See fpc#281 and fpc#534.
+ +* Thu Jun 18 2015 Fedora Release Engineering - 3.4.3-3 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_23_Mass_Rebuild + +* Wed Jun 17 2015 Matej Stuchlik - 3.4.3-4 +- Use 1024bit DH key in test_ssl +- Use -O0 when compiling -debug build +- Update pip version variable to the version we actually ship + +* Wed Jun 17 2015 Matej Stuchlik - 3.4.3-3 +- Make relocating Python by changing _prefix actually work +Resolves: rhbz#1231801 + +* Mon May 4 2015 Peter Robinson 3.4.3-2 +- Disable test_gdb on aarch64 (rhbz#1196181), it joins all other non x86 arches + +* Thu Mar 12 2015 Matej Stuchlik - 3.4.3-1 +- Updated to 3.4.3 +- BuildPython now accepts additional build options +- Temporarily disabled test_gdb on arm (rhbz#1196181) + +* Wed Feb 25 2015 Matej Stuchlik - 3.4.2-7 +- Fixed undefined behaviour in faulthandler which caused test to hang on x86_64 + (http://bugs.python.org/issue23433) + +* Sat Feb 21 2015 Till Maas - 3.4.2-6 +- Rebuilt for Fedora 23 Change + https://fedoraproject.org/wiki/Changes/Harden_all_packages_with_position-independent_code + +* Tue Feb 17 2015 Ville Skyttä - 3.4.2-5 +- Own systemtap dirs (#710733) + +* Mon Jan 12 2015 Dan Horák - 3.4.2-4 +- build with valgrind on ppc64le +- disable test_gdb on s390(x) until rhbz#1181034 is resolved + +* Tue Dec 16 2014 Robert Kuska - 3.4.2-3 +- New patches: 170 (gc asserts), 200 (gettext headers), + 201 (gdbm memory leak) + +* Thu Dec 11 2014 Robert Kuska - 3.4.2-2 +- OpenSSL disabled SSLv3 in SSLv23 method + +* Thu Nov 13 2014 Matej Stuchlik - 3.4.2-1 +- Update to 3.4.2 +- Refreshed patches: 156 (gdb autoload) +- Removed: 195 (Werror declaration), 197 (CVE-2014-4650) + +* Mon Nov 03 2014 Slavek Kabrda - 3.4.1-16 +- Fix CVE-2014-4650 - CGIHTTPServer URL handling +Resolves: rhbz#1113529 + +* Sun Sep 07 2014 Karsten Hopp 3.4.1-15 +- exclude test_gdb on ppc* (rhbz#1132488) + +* Thu Aug 21 2014 Slavek Kabrda - 3.4.1-14 +- Update rewheel patch with fix from https://github.com/bkabrda/rewheel/pull/1 + +* Sun Aug 17 2014 Fedora Release Engineering - 3.4.1-13 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_21_22_Mass_Rebuild + +* Sun Jun 8 2014 Peter Robinson 3.4.1-12 +- aarch64 has valgrind, just list those that don't support it + +* Sun Jun 08 2014 Fedora Release Engineering - 3.4.1-11 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_21_Mass_Rebuild + +* Wed Jun 04 2014 Karsten Hopp 3.4.1-10 +- bump release and rebuild to link with the correct tcl/tk libs on ppcle + +* Tue Jun 03 2014 Matej Stuchlik - 3.4.1-9 +- Change paths to bundled projects in rewheel patch + +* Fri May 30 2014 Miro Hrončok - 3.4.1-8 +- In config script, use uname -m to write the arch + +* Thu May 29 2014 Dan Horák - 3.4.1-7 +- update the arch list where valgrind exists - %%power64 includes also + ppc64le which is not supported yet + +* Thu May 29 2014 Miro Hrončok - 3.4.1-6 +- Forward arguments to the arch specific config script +Resolves: rhbz#1102683 + +* Wed May 28 2014 Miro Hrončok - 3.4.1-5 +- Rename python3.Xm-config script to arch specific. +Resolves: rhbz#1091815 + +* Tue May 27 2014 Bohuslav Kabrda - 3.4.1-4 +- Use python3-*, not python-* runtime requires on setuptools and pip +- rebuild for tcl-8.6 + +* Tue May 27 2014 Matej Stuchlik - 3.4.1-3 +- Update the rewheel module + +* Mon May 26 2014 Miro Hrončok - 3.4.1-2 +- Fix multilib dependencies. 
+Resolves: rhbz#1091815 + +* Sun May 25 2014 Matej Stuchlik - 3.4.1-1 +- Update to Python 3.4.1 + +* Sun May 25 2014 Matej Stuchlik - 3.4.0-8 +- Fix test_gdb failure on ppc64le +Resolves: rhbz#1095355 + +* Thu May 22 2014 Miro Hrončok - 3.4.0-7 +- Add macro %%python3_version_nodots + +* Sun May 18 2014 Matej Stuchlik - 3.4.0-6 +- Disable test_faulthandler, test_gdb on aarch64 +Resolves: rhbz#1045193 + +* Fri May 16 2014 Matej Stuchlik - 3.4.0-5 +- Don't add Werror=declaration-after-statement for extension + modules through setup.py (PyBT#21121) + +* Mon May 12 2014 Matej Stuchlik - 3.4.0-4 +- Add setuptools and pip to Requires + +* Tue Apr 29 2014 Matej Stuchlik - 3.4.0-3 +- Point __os_install_post to correct brp-* files + +* Tue Apr 15 2014 Matej Stuchlik - 3.4.0-2 +- Temporarily disable tests requiring SIGHUP (rhbz#1088233) + +* Tue Apr 15 2014 Matej Stuchlik - 3.4.0-1 +- Update to Python 3.4 final +- Add patch adding the rewheel module +- Merge patches from master + +* Wed Jan 08 2014 Bohuslav Kabrda - 3.4.0-0.1.b2 +- Update to Python 3.4 beta 2. +- Refreshed patches: 55 (systemtap), 146 (hashlib-fips), 154 (test_gdb noise) +- Dropped patches: 114 (statvfs constants), 177 (platform unicode) + +* Mon Nov 25 2013 Bohuslav Kabrda - 3.4.0-0.1.b1 +- Update to Python 3.4 beta 1. +- Refreshed patches: 102 (lib64), 111 (no static lib), 125 (less verbose COUNT +ALLOCS), 141 (fix COUNT_ALLOCS in test_module), 146 (hashlib fips), +157 (UID+GID overflows), 173 (ENOPROTOOPT in bind_port) +- Removed patch 00187 (remove pthread atfork; upstreamed) + +* Mon Nov 04 2013 Bohuslav Kabrda - 3.4.0-0.1.a4 +- Update to Python 3.4 alpha 4. +- Refreshed patches: 55 (systemtap), 102 (lib64), 111 (no static lib), +114 (statvfs flags), 132 (unittest rpmbuild hooks), 134 (fix COUNT_ALLOCS in +test_sys), 143 (tsc on ppc64), 146 (hashlib fips), 153 (test gdb noise), +157 (UID+GID overflows), 173 (ENOPROTOOPT in bind_port), 186 (dont raise +from py_compile) +- Removed patches: 129 (test_subprocess nonreadable dir - no longer fails in +Koji), 142 (the mock issue that caused this is fixed) +- Added patch 187 (remove thread atfork) - will be in next version +- Refreshed script for checking pyc and pyo timestamps with new ignored files. +- The fips patch is disabled for now until upstream makes a final decision +what to do with sha3 implementation for 3.4.0. + +* Wed Oct 30 2013 Bohuslav Kabrda - 3.3.2-7 +- Bytecompile all *.py files properly during build (rhbz#1023607) + +* Fri Aug 23 2013 Matej Stuchlik - 3.3.2-6 +- Added fix for CVE-2013-4238 (rhbz#996399) + +* Fri Jul 26 2013 Dennis Gilmore - 3.3.2-5 +- fix up indentation in arm patch + +* Fri Jul 26 2013 Dennis Gilmore - 3.3.2-4 +- disable a test that fails on arm +- enable valgrind support on arm arches + +* Tue Jul 02 2013 Bohuslav Kabrda - 3.3.2-3 +- Fix build with libffi containing multilib wrapper for ffi.h (rhbz#979696). + +* Mon May 20 2013 Bohuslav Kabrda - 3.3.2-2 +- Add patch for CVE-2013-2099 (rhbz#963261). + +* Thu May 16 2013 Bohuslav Kabrda - 3.3.2-1 +- Updated to Python 3.3.2. +- Refreshed patches: 153 (gdb test noise) +- Dropped patches: 175 (configure -Wformat, fixed upstream), 182 (gdb +test threads) +- Synced patch numbers with python.spec. 
+
+* Thu May 9 2013 David Malcolm - 3.3.1-4
+- fix test.test_gdb.PyBtTests.test_threads on ppc64 (patch 181; rhbz#960010)
+
+* Thu May 02 2013 Bohuslav Kabrda - 3.3.1-3
+- Add patch that enables building on ppc64p7 (replace the sed, so that
+we stay consistent with the python2 spec and it's more obvious that we're doing it).
+
+* Wed Apr 24 2013 Bohuslav Kabrda - 3.3.1-2
+- Add fix for gdb tests failing on arm, rhbz#951802.
+
+* Tue Apr 09 2013 Bohuslav Kabrda - 3.3.1-1
+- Updated to Python 3.3.1.
+- Refreshed patches: 55 (systemtap), 111 (no static lib), 146 (hashlib fips),
+153 (fix test_gdb noise), 157 (uid, gid overflow - fixed upstream, just
+keeping a few more downstream tests)
+- Removed patches: 3 (audiotest.au made it to upstream tarball)
+- Removed workaround for http://bugs.python.org/issue14774, discussed in
+http://bugs.python.org/issue15298 and fixed in revision 24d52d3060e8.
+
+* Mon Mar 25 2013 David Malcolm - 3.3.0-10
+- fix gcc 4.8 incompatibility (rhbz#927358); regenerate autotool intermediates
+
+* Mon Mar 25 2013 David Malcolm - 3.3.0-9
+- renumber patches to keep them in sync with python.spec
+
+* Fri Mar 15 2013 Toshio Kuratomi - 3.3.0-8
+- Fix error in platform.platform() when non-ascii byte strings are decoded to
+  unicode (rhbz#922149)
+
+* Thu Mar 14 2013 Toshio Kuratomi - 3.3.0-7
+- Fix up shared library extension (rhbz#889784)
+
+* Thu Mar 07 2013 Karsten Hopp 3.3.0-6
+- add ppc64p7 build target, optimized for Power7
+
+* Mon Mar 4 2013 David Malcolm - 3.3.0-5
+- add workaround for ENOPROTOOPT seen running selftests in Koji
+(rhbz#913732)
+
+* Mon Mar 4 2013 David Malcolm - 3.3.0-4
+- remove config flag from /etc/rpm/macros.{python3|pybytecompile}
+
+* Mon Feb 11 2013 David Malcolm - 3.3.0-3
+- add aarch64 (rhbz#909783)
+
+* Thu Nov 29 2012 David Malcolm - 3.3.0-2
+- add BR on bluez-libs-devel (rhbz#879720)
+
+* Sat Sep 29 2012 David Malcolm - 3.3.0-1
+- 3.3.0rc3 -> 3.3.0; drop alphatag
+
+* Mon Sep 24 2012 David Malcolm - 3.3.0-0.6.rc3
+- 3.3.0rc2 -> 3.3.0rc3
+
+* Mon Sep 10 2012 David Malcolm - 3.3.0-0.5.rc2
+- 3.3.0rc1 -> 3.3.0rc2; refresh patch 55
+
+* Mon Aug 27 2012 David Malcolm - 3.3.0-0.4.rc1
+- 3.3.0b2 -> 3.3.0rc1; refresh patches 3, 55
+
+* Mon Aug 13 2012 David Malcolm - 3.3.0-0.3.b2
+- 3.3b1 -> 3.3b2; drop upstreamed patch 152; refresh patches 3, 102, 111,
+134, 153, 160; regenerate autotools patch; rework systemtap patch to work
+correctly when LANG=C (patch 55); importlib.test was moved to
+test.test_importlib upstream
+
+* Mon Aug 13 2012 Karsten Hopp 3.3.0-0.2.b1
+- disable some failing checks on PPC* (rhbz#846849)
+
+* Fri Aug 3 2012 David Malcolm - 3.3.0-0.1.b1
+- 3.2 -> 3.3: https://fedoraproject.org/wiki/Features/Python_3.3
+- 3.3.0b1: refresh patches 3, 55, 102, 111, 113, 114, 134, 157; drop upstream
+patch 147; regenerate autotools patch; drop "--with-wide-unicode" from
+configure (PEP 393); "plat-linux2" -> "plat-linux" (upstream issue 12326);
+"bz2" -> "_bz2" and "crypt" -> "_crypt"; egg-info files are no longer shipped
+for stdlib (upstream issues 10645 and 12218); email/test moved to
+test/test_email; add /usr/bin/pyvenv[-3.3] and venv module (PEP 405); add
+_decimal and _lzma modules; make collections modules explicit in payload again
+(upstream issue 11085); add _testbuffer module to tests subpackage (added in
+upstream commit 3f9b3b6f7ff0); fix test failures (patches 160 and 161);
+workaround erroneously shared _sysconfigdata.py upstream issue #14774; fix
+distutils.sysconfig traceback (patch 162); add BuildRequires: xz-devel (for
+_lzma module); skip some tests within test_socket (patch 163) + +* Sat Jul 21 2012 Fedora Release Engineering - 3.2.3-11 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_18_Mass_Rebuild + +* Fri Jul 20 2012 David Malcolm - 3.3.0-0.1.b1 + +* Fri Jun 22 2012 David Malcolm - 3.2.3-10 +- use macro for power64 (rhbz#834653) + +* Mon Jun 18 2012 David Malcolm - 3.2.3-9 +- fix missing include in uid/gid handling patch (patch 157; rhbz#830405) + +* Wed May 30 2012 Bohuslav Kabrda - 3.2.3-8 +- fix tapset for debug build + +* Tue May 15 2012 David Malcolm - 3.2.3-7 +- update uid/gid handling to avoid int overflows seen with uid/gid +values >= 2^31 on 32-bit architectures (patch 157; rhbz#697470) + +* Fri May 4 2012 David Malcolm - 3.2.3-6 +- renumber autotools patch from 300 to 5000 +- specfile cleanups + +* Mon Apr 30 2012 David Malcolm - 3.2.3-5 +- fix test_gdb.py (patch 156; rhbz#817072) + +* Fri Apr 20 2012 David Malcolm - 3.2.3-4 +- avoid allocating thunks in ctypes unless absolutely necessary, to avoid +generating SELinux denials on "import ctypes" and "import uuid" when embedding +Python within httpd (patch 155; rhbz#814391) + +* Fri Apr 20 2012 David Malcolm - 3.2.3-3 +- add explicit version requirements on expat to avoid linkage problems with +XML_SetHashSalt + +* Thu Apr 12 2012 David Malcolm - 3.2.3-2 +- fix test_gdb (patch 153) + +* Wed Apr 11 2012 David Malcolm - 3.2.3-1 +- 3.2.3; refresh patch 102 (lib64); drop upstream patches 148 (gdbm magic +values), 149 (__pycache__ fix); add patch 152 (test_gdb regex) + +* Thu Feb 9 2012 Thomas Spura - 3.2.2-13 +- use newly installed python for byte compiling (now for real) + +* Sun Feb 5 2012 Thomas Spura - 3.2.2-12 +- use newly installed python for byte compiling (#787498) + +* Wed Jan 4 2012 Ville Skyttä - 3.2.2-11 +- Build with $RPM_LD_FLAGS (#756863). +- Use xz-compressed source tarball. 
+ +* Wed Dec 07 2011 Karsten Hopp 3.2.2-10 +- disable rAssertAlmostEqual in test_cmath on PPC (#750811) + +* Mon Oct 17 2011 Rex Dieter - 3.2.2-9 +- python3-devel missing autogenerated pkgconfig() provides (#746751) + +* Mon Oct 10 2011 David Malcolm - 3.2.2-8 +- cherrypick fix for distutils not using __pycache__ when byte-compiling +files (rhbz#722578) + +* Fri Sep 30 2011 David Malcolm - 3.2.2-7 +- re-enable gdbm (patch 148; rhbz#742242) + +* Fri Sep 16 2011 David Malcolm - 3.2.2-6 +- add a sys._debugmallocstats() function (patch 147) + +* Wed Sep 14 2011 David Malcolm - 3.2.2-5 +- support OpenSSL FIPS mode in _hashlib and hashlib; don't build the _md5 and +_sha* modules, relying on _hashlib in hashlib (rhbz#563986; patch 146) + +* Tue Sep 13 2011 David Malcolm - 3.2.2-4 +- disable gdbm module to prepare for gdbm soname bump + +* Mon Sep 12 2011 David Malcolm - 3.2.2-3 +- renumber and rename patches for consistency with python.spec (8 to 55, 106 +to 104, 6 to 111, 104 to 113, 105 to 114, 125, 131, 130 to 143) + +* Sat Sep 10 2011 David Malcolm - 3.2.2-2 +- rewrite of "check", introducing downstream-only hooks for skipping specific +cases in an rpmbuild (patch 132), and fixing/skipping failing tests in a more +fine-grained manner than before; (patches 106, 133-142 sparsely, moving +patches for consistency with python.spec: 128 to 134, 126 to 135, 127 to 141) + +* Tue Sep 6 2011 David Malcolm - 3.2.2-1 +- 3.2.2 + +* Thu Sep 1 2011 David Malcolm - 3.2.1-7 +- run selftests with "--verbose" +- disable parts of test_io on ppc (rhbz#732998) + +* Wed Aug 31 2011 David Malcolm - 3.2.1-6 +- use "--findleaks --verbose3" when running test suite + +* Tue Aug 23 2011 David Malcolm - 3.2.1-5 +- re-enable and fix the --with-tsc option on ppc64, and rework it on 32-bit +ppc to avoid aliasing violations (patch 130; rhbz#698726) + +* Tue Aug 23 2011 David Malcolm - 3.2.1-4 +- don't use --with-tsc on ppc64 debug builds (rhbz#698726) + +* Thu Aug 18 2011 David Malcolm - 3.2.1-3 +- add %%python3_version to the rpm macros (rhbz#719082) + +* Mon Jul 11 2011 Dennis Gilmore - 3.2.1-2 +- disable some tests on sparc arches + +* Mon Jul 11 2011 David Malcolm - 3.2.1-1 +- 3.2.1; refresh lib64 patch (102), subprocess unit test patch (129), disabling +of static library build (due to Modules/_testembed; patch 6), autotool +intermediates (patch 300) + +* Fri Jul 8 2011 David Malcolm - 3.2-5 +- use the gdb hooks from the upstream tarball, rather than keeping our own copy + +* Fri Jul 8 2011 David Malcolm - 3.2-4 +- don't run test_openpty and test_pty in %%check + +* Fri Jul 8 2011 David Malcolm - 3.2-3 +- cleanup of BuildRequires; add comment headings to specfile sections + +* Tue Apr 19 2011 David Malcolm - 3.2-2 +- fix the libpython.stp systemtap tapset (rhbz#697730) + +* Mon Feb 21 2011 David Malcolm - 3.2-1 +- 3.2 +- drop alphatag +- regenerate autotool patch + +* Mon Feb 14 2011 David Malcolm - 3.2-0.13.rc3 +- add a /usr/bin/python3-debug symlink within the debug subpackage + +* Mon Feb 14 2011 David Malcolm - 3.2-0.12.rc3 +- 3.2rc3 +- regenerate autotool patch + +* Wed Feb 09 2011 Fedora Release Engineering - 3.2-0.11.rc2 +- Rebuilt for https://fedoraproject.org/wiki/Fedora_15_Mass_Rebuild + +* Mon Jan 31 2011 David Malcolm - 3.2-0.10.rc2 +- 3.2rc2 + +* Mon Jan 17 2011 David Malcolm - 3.2-0.9.rc1 +- 3.2rc1 +- rework patch 6 (static lib removal) +- remove upstreamed patch 130 (ppc debug build) +- regenerate patch 300 (autotool intermediates) +- updated packaging to reflect upstream rewrite of "Demo" (issue 7962) +- 
added libpython3.so and 2to3-3.2 + +* Wed Jan 5 2011 David Malcolm - 3.2-0.8.b2 +- set EXTRA_CFLAGS to our CFLAGS, rather than overriding OPT, fixing a linker +error with dynamic annotations (when configured using --with-valgrind) +- fix the ppc build of the debug configuration (patch 130; rhbz#661510) + +* Tue Jan 4 2011 David Malcolm - 3.2-0.7.b2 +- add --with-valgrind to configuration (on architectures that support this) + +* Wed Dec 29 2010 David Malcolm - 3.2-0.6.b2 +- work around test_subprocess failure seen in koji (patch 129) + +* Tue Dec 28 2010 David Malcolm - 3.2-0.5.b2 +- 3.2b2 +- rework patch 3 (removal of mimeaudio tests), patch 6 (no static libs), +patch 8 (systemtap), patch 102 (lib64) +- remove patch 4 (rendered redundant by upstream r85537), patch 103 (PEP 3149), +patch 110 (upstreamed expat fix), patch 111 (parallel build fix for grammar +fixed upstream) +- regenerate patch 300 (autotool intermediates) +- workaround COUNT_ALLOCS weakref issues in test suite (patch 126, patch 127, +patch 128) +- stop using runtest.sh in %%check (dropped by upstream), replacing with +regrtest; fixup list of failing tests +- introduce "pyshortver", "SOABI_optimized" and "SOABI_debug" macros +- rework manifests of shared libraries to use "SOABI_" macros, reflecting +PEP 3149 +- drop itertools, operator and _collections modules from the manifests as py3k +commit r84058 moved these inside libpython; json/tests moved to test/json_tests +- move turtle code into the tkinter subpackage + +* Wed Nov 17 2010 David Malcolm - 3.2-0.5.a1 +- fix sysconfig to not rely on the -devel subpackage (rhbz#653058) + +* Thu Sep 9 2010 David Malcolm - 3.2-0.4.a1 +- move most of the content of the core package to the libs subpackage, given +that the libs aren't meaningfully usable without the standard libraries + +* Wed Sep 8 2010 David Malcolm - 3.2-0.3.a1 +- Move test.support to core package (rhbz#596258) +- Add various missing __pycache__ directories to payload + +* Sun Aug 22 2010 Toshio Kuratomi - 3.2-0.2.a1 +- Add __pycache__ directory for site-packages + +* Sun Aug 22 2010 Thomas Spura - 3.2-0.1.a1 +- on 64bit "stdlib" was still "/usr/lib/python*" (modify *lib64.patch) +- make find-provides-without-python-sonames.sh 64bit aware + +* Sat Aug 21 2010 David Malcolm - 3.2-0.0.a1 +- 3.2a1; add alphatag +- rework %%files in the light of PEP 3147 (__pycache__) +- drop our configuration patch to Setup.dist (patch 0): setup.py should do a +better job of things, and the %%files explicitly lists our modules (r82746 +appears to break the old way of doing things). This leads to various modules +changing from "foomodule.so" to "foo.so". It also leads to the optimized build +dropping the _sha1, _sha256 and _sha512 modules, but these are provided by +_hashlib; _weakref becomes a builtin module; xxsubtype goes away (it's only for +testing/devel purposes) +- fixup patches 3, 4, 6, 8, 102, 103, 105, 111 for the rebase +- remove upstream patches: 7 (system expat), 106, 107, 108 (audioop reformat +plus CVE-2010-1634 and CVE-2010-2089), 109 (CVE-2008-5983) +- add machinery for rebuilding "configure" and friends, using the correct +version of autoconf (patch 300) +- patch the debug build's usage of COUNT_ALLOCS to be less verbose (patch 125) +- "modulator" was removed upstream +- drop "-b" from patch applications affecting .py files to avoid littering the +installation tree + +* Thu Aug 19 2010 Toshio Kuratomi - 3.1.2-13 +- Turn on computed-gotos. 
+- Fix for parallel make and graminit.c
+
+* Fri Jul 2 2010 David Malcolm - 3.1.2-12
+- rebuild
+
+* Fri Jul 2 2010 David Malcolm - 3.1.2-11
+- Fix an incompatibility between pyexpat and the system expat-2.0.1 that led to
+a segfault running test_pyexpat.py (patch 110; upstream issue 9054; rhbz#610312)
+
+* Fri Jun 4 2010 David Malcolm - 3.1.2-10
+- ensure that the compiler is invoked with "-fwrapv" (rhbz#594819)
+- reformat whitespace in audioop.c (patch 106)
+- CVE-2010-1634: fix various integer overflow checks in the audioop
+module (patch 107)
+- CVE-2010-2089: further checks within the audioop module (patch 108)
+- CVE-2008-5983: the new PySys_SetArgvEx entry point from r81399 (patch 109)
+
+* Thu May 27 2010 Dan Horák - 3.1.2-9
+- reading the timestamp counter is available only on some arches (see Python/ceval.c)
+
+* Wed May 26 2010 David Malcolm - 3.1.2-8
+- add flags for statvfs.f_flag to the constant list in posixmodule (i.e. "os")
+(patch 105)
+
+* Tue May 25 2010 David Malcolm - 3.1.2-7
+- add configure-time support for COUNT_ALLOCS and CALL_PROFILE debug options
+(patch 104); enable them and the WITH_TSC option within the debug build
+
+* Mon May 24 2010 David Malcolm - 3.1.2-6
+- build and install two different configurations of Python 3: debug and
+standard, packaging the debug build in a new "python3-debug" subpackage
+(patch 103)
+
+* Tue Apr 13 2010 David Malcolm - 3.1.2-5
+- exclude test_http_cookies when running selftests, due to hang seen on
+http://koji.fedoraproject.org/koji/taskinfo?taskID=2088463 (cancelled after
+11 hours)
+- update python-gdb.py from v5 to py3k version submitted upstream
+
+* Wed Mar 31 2010 David Malcolm - 3.1.2-4
+- update python-gdb.py from v4 to v5 (improving performance and stability,
+adding commands)
+
+* Thu Mar 25 2010 David Malcolm - 3.1.2-3
+- update python-gdb.py from v3 to v4 (fixing infinite recursion on reference
+cycles and tracebacks on bytes 0x80-0xff in strings, adding handlers for sets
+and exceptions)
+
+* Wed Mar 24 2010 David Malcolm - 3.1.2-2
+- refresh gdb hooks to v3 (reworking how they are packaged)
+
+* Sun Mar 21 2010 David Malcolm - 3.1.2-1
+- update to 3.1.2: http://www.python.org/download/releases/3.1.2/
+- drop upstreamed patch 2 (.pyc permissions handling)
+- drop upstream patch 5 (fix for the test_tk and test_ttk_* selftests)
+- drop upstreamed patch 200 (path-fixing script)
+
+* Sat Mar 20 2010 David Malcolm - 3.1.1-28
+- fix typo in libpython.stp (rhbz:575336)
+
+* Fri Mar 12 2010 David Malcolm - 3.1.1-27
+- add pyfuntop.stp example (source 7)
+- convert usage of $$RPM_BUILD_ROOT to %%{buildroot} throughout, for
+consistency with python.spec
+
+* Mon Feb 15 2010 Thomas Spura - 3.1.1-26
+- rebuild for new package of redhat-rpm-config (rhbz:564527)
+- use 'install -p' when running 'make install'
+
+* Fri Feb 12 2010 David Malcolm - 3.1.1-25
+- split configure options into multiple lines for ease of editing
+- add systemtap static markers (wcohen, mjw, dmalcolm; patch 8), a systemtap
+tapset defining "python.function.entry" and "python.function.return" to make
+the markers easy to use (dmalcolm; source 5), and an example of using the
+tapset to the docs (dmalcolm; source 6) (rhbz:545179)
+
+* Mon Feb 8 2010 David Malcolm - 3.1.1-24
+- move the -gdb.py file from %%{_libdir}/INSTSONAME-gdb.py to
+%%{_prefix}/lib/debug/%%{_libdir}/INSTSONAME.debug-gdb.py to avoid noise from
+ldconfig (bug 562980), and which should also ensure it becomes part of the
+debuginfo subpackage, rather than the libs subpackage
+- introduce %%{py_SOVERSION} and %%{py_INSTSONAME} to reflect the upstream
+configure script, and to avoid fragile scripts that try to figure this out
+dynamically (e.g. for the -gdb.py change)
+
+* Mon Feb 8 2010 David Malcolm - 3.1.1-23
+- add gdb hooks for easier debugging (Source 4)
+
+* Thu Jan 28 2010 David Malcolm - 3.1.1-22
+- update python-3.1.1-config.patch to remove downstream customization of build
+of pyexpat and elementtree modules
+- add patch adapted from upstream (patch 7) to add support for building against
+system expat; add --with-system-expat to "configure" invocation
+- remove embedded copies of expat and zlib from source tree during "prep"
+
+* Mon Jan 25 2010 David Malcolm - 3.1.1-21
+- introduce %%{dynload_dir} macro
+- explicitly list all lib-dynload files, rather than dynamically gathering the
+payload into a temporary text file, so that we can be sure what we are
+shipping
+- introduce a macros.pybytecompile source file, to help with packaging python3
+modules (Source3; written by Toshio)
+- rename "2to3-3" to "python3-2to3" to better reflect python 3 module packaging
+plans
+
+* Mon Jan 25 2010 David Malcolm - 3.1.1-20
+- change python-3.1.1-config.patch to remove our downstream change to curses
+configuration in Modules/Setup.dist, so that the curses modules are built using
+setup.py with the downstream default (linking against libncursesw.so, rather
+than libncurses.so), rather than within the Makefile; add a test to %%install
+to verify the dso files that the curses module is linked against the correct
+DSO (bug 539917; changes _cursesmodule.so -> _curses.so)
+
+* Fri Jan 22 2010 David Malcolm - 3.1.1-19
+- add %%py3dir macro to macros.python3 (to be used during unified python 2/3
+builds for setting up the python3 copy of the source tree)
+
+* Wed Jan 20 2010 David Malcolm - 3.1.1-18
+- move lib2to3 from -tools subpackage to main package (bug 556667)
+
+* Sun Jan 17 2010 David Malcolm - 3.1.1-17
+- patch Makefile.pre.in to avoid building static library (patch 6, bug 556092)
+
+* Fri Jan 15 2010 David Malcolm - 3.1.1-16
+- use the %%{_isa} macro to ensure that the python-devel dependency on python
+is for the correct multilib arch (#555943)
+- delete bundled copy of libffi to make sure we use the system one
+
+* Fri Jan 15 2010 David Malcolm - 3.1.1-15
+- fix the URLs output by pydoc so they point at python.org's 3.1 build of the
+docs, rather than the 2.6 build
+
+* Wed Jan 13 2010 David Malcolm - 3.1.1-14
+- replace references to /usr with %%{_prefix}; replace references to
+/usr/include with %%{_includedir} (Toshio)
+
+* Mon Jan 11 2010 David Malcolm - 3.1.1-13
+- fix permission on find-provides-without-python-sonames.sh from 775 to 755
+
+* Mon Jan 11 2010 David Malcolm - 3.1.1-12
+- remove build-time requirements on tix and tk, since we already have
+build-time requirements on the -devel subpackages for each of these (Thomas
+Spura)
+- replace usage of %%define with %%global (Thomas Spura)
+- remove forcing of CC=gcc as this old workaround for bug 109268 appears to no
+longer be necessary
+- move various test files from the "tools"/"tkinter" subpackages to the "test"
+subpackage
+
+* Thu Jan 7 2010 David Malcolm - 3.1.1-11
+- add %%check section (thanks to Thomas Spura)
+- update patch 4 to use correct shebang line
+- get rid of stray patch file from buildroot
+
+* Tue Nov 17 2009 Andrew McNabb - 3.1.1-10
+- switched a few instances of "find |xargs" to "find -exec" for consistency.
+- made the description of __os_install_post more accurate.
+
+* Wed Nov 4 2009 David Malcolm - 3.1.1-9
+- add macros.python3 to the -devel subpackage, containing common macros for use
+when packaging python3 modules
+
+* Tue Nov 3 2009 David Malcolm - 3.1.1-8
+- add a provides of "python(abi)" (see bug 532118)
+- fix issues identified by a.badger in package review (bug 526126, comment 39):
+  - use "3" throughout metadata, rather than "3.*"
+  - remove conditional around "pkg-config openssl"
+  - use standard cleanup of RPM_BUILD_ROOT
+  - replace hardcoded references to /usr with _prefix macro
+  - stop removing egg-info files
+  - use /usr/bin/python3.1 rather than /usr/bin/env python3.1 when fixing
+up shebang lines
+  - stop attempting to remove no-longer-present .cvsignore files
+  - move the post/postun sections above the "files" sections
+
+* Thu Oct 29 2009 David Malcolm - 3.1.1-7
+- remove commented-away patch 51 (python-2.6-distutils_rpm.patch): the -O1
+flag is used by default in the upstream code
+- "Makefile" and the config-32/64.h file are needed by distutils/sysconfig.py
+_init_posix(), so we include them in the core package, along with their parent
+directories (bug 531901)
+
+* Tue Oct 27 2009 David Malcolm - 3.1.1-6
+- reword description, based on suggestion by amcnabb
+- fix the test_email and test_imp selftests (patch 3 and patch 4 respectively)
+- fix the test_tk and test_ttk_* selftests (patch 5)
+- fix up the specfile's handling of shebang/perms to avoid corrupting
+test_httpservers.py (sed command suggested by amcnabb)
+
+* Thu Oct 22 2009 David Malcolm - 3.1.1-5
+- fixup importlib/_bootstrap.py so that it correctly handles being unable to
+open .pyc files for writing (patch 2, upstream issue 7187)
+- actually apply the rpath patch (patch 1)
+
+* Thu Oct 22 2009 David Malcolm - 3.1.1-4
+- update patch0's setup of the crypt module to link it against libcrypt
+- update patch0 to comment "datetimemodule" back out, so that it is built
+using setup.py (see Setup, option 3), thus linking it statically against
+timemodule.c and thus avoiding a run-time "undefined symbol:
+_PyTime_DoubleToTimet" failure on "import datetime"
+
+* Wed Oct 21 2009 David Malcolm - 3.1.1-3
+- remove executable flag from various files that shouldn't have it
+- fix end-of-line encodings
+- fix a character encoding
+
+* Tue Oct 20 2009 David Malcolm - 3.1.1-2
+- disable invocation of brp-python-bytecompile in postprocessing, since
+it would be with the wrong version of python (adapted from ivazquez'
+python3000 specfile)
+- use a custom implementation of __find_provides in order to filter out bogus
+provides lines for the various .so modules
+- fixup distutils/unixccompiler.py to remove standard library path from rpath
+(patch 1, was Patch0 in ivazquez' python3000 specfile)
+- split out libraries into a -libs subpackage
+- update summaries and descriptions, basing content on ivazquez' specfile
+- fixup executable permissions on .py, .xpm and .xbm files, based on work in
+ivazquez's specfile
+- get rid of DOS batch files
+- fixup permissions for shared libraries from non-standard 555 to standard 755
+- move /usr/bin/python*-config to the -devel subpackage
+- mark various directories as being documentation
+
+* Thu Sep 24 2009 Andrew McNabb 3.1.1-1
+- Initial package for Python 3.
+ diff --git a/sources b/sources new file mode 100644 index 0000000..002f1fb --- /dev/null +++ b/sources @@ -0,0 +1 @@ +SHA512 (Python-3.6.3.tar.xz) = 32f24a3adcb7880003c7ecdc5e53e838e774adda76b308961d8215e28db630b2fa2828097817924c76afa4212b2df3362eb64d4e10f37c0147f512ec5aa8662b diff --git a/systemtap-example.stp b/systemtap-example.stp new file mode 100644 index 0000000..164333a --- /dev/null +++ b/systemtap-example.stp @@ -0,0 +1,19 @@ +/* + Example usage of the Python systemtap tapset to show a nested view of all + Python function calls (and returns) across the whole system. + + Run this using + stap systemtap-example.stp + to instrument all Python processes on the system, or (for example) using + stap systemtap-example.stp -c COMMAND + to instrument a specific program (implemented in Python) +*/ +probe python.function.entry +{ + printf("%s => %s in %s:%d\n", thread_indent(1), funcname, filename, lineno); +} + +probe python.function.return +{ + printf("%s <= %s in %s:%d\n", thread_indent(-1), funcname, filename, lineno); +}