Verified commit 367905c7 authored by Pavel Zhukov, committed by Andrei Gherzan

default.xml: Bump oe-core revision


This update brings a few CVE fixes (vim, ncurses, glib, etc.) as well as
bind and python updates. We had python updated downstream and bind is
unsupported, so the update should not break the current API.

Signed-off-by: Pavel Zhukov <pavel.zhukov@huawei.com>
parent 30c45e3b
From 251347fc970a397a9cd63ed3f87c5e6c52e15187 Mon Sep 17 00:00:00 2001
From: Khem Raj <raj.khem@gmail.com>
Date: Tue, 14 May 2013 15:00:26 -0700
Subject: [PATCH] python3: Add target and native recipes
Upstream-Status: Inappropriate [embedded specific]
02/2015 Rebased for Python 3.4.2
# The proper prefix is inside our staging area.
# Signed-Off: Michael 'Mickey' Lauer <mickey@vanille-media.de>
# Signed-off-by: Phil Blundell <philb@gnu.org>
# Signed-off-by: Khem Raj <raj.khem@gmail.com>
# Signed-off-by: Alejandro Hernandez <alejandro.hernandez@linux.intel.com>
---
Lib/distutils/sysconfig.py | 14 +++++++++++---
1 file changed, 11 insertions(+), 3 deletions(-)
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index 2df348c..4f8db84 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -96,7 +96,9 @@ def get_python_inc(plat_specific=0, prefix=None):
If 'prefix' is supplied, use it instead of sys.base_prefix or
sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
"""
- if prefix is None:
+ if prefix is None and os.environ.get('STAGING_INCDIR', ""):
+ prefix = os.environ['STAGING_INCDIR'].rstrip('include')
+ elif prefix is None:
prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
if os.name == "posix":
if python_build:
@@ -139,7 +141,13 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
If 'prefix' is supplied, use it instead of sys.base_prefix or
sys.base_exec_prefix -- i.e., ignore 'plat_specific'.
"""
- if prefix is None:
+ if os.environ.get('STAGING_LIBDIR', ""):
+ lib_basename = os.environ['STAGING_LIBDIR'].split('/')[-1]
+ else:
+ lib_basename = "lib"
+ if prefix is None and os.environ.get('STAGING_LIBDIR', ""):
+ prefix = os.environ['STAGING_LIBDIR'].rstrip(lib_basename)
+ elif prefix is None:
if standard_lib:
prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX
else:
@@ -147,7 +155,7 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
if os.name == "posix":
libpython = os.path.join(prefix,
- "lib", "python" + get_python_version())
+ lib_basename, "python" + get_python_version())
if standard_lib:
return libpython
else:
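For context, a minimal sketch (hypothetical paths, assuming the patched distutils.sysconfig above is the copy being imported from the recipe sysroot) of what the STAGING_INCDIR/STAGING_LIBDIR override is meant to achieve:

import os
from distutils import sysconfig  # assumed to be the patched module, not the host's

# Hypothetical staging locations, normally exported by the OE build environment
os.environ['STAGING_INCDIR'] = '/build/tmp/work/recipe-sysroot/usr/include'
os.environ['STAGING_LIBDIR'] = '/build/tmp/work/recipe-sysroot/usr/lib'

# With the patch applied, both lookups resolve inside the staging area rather
# than the interpreter's own /usr prefix.
print(sysconfig.get_python_inc())  # e.g. .../recipe-sysroot/usr/include/python3.8
print(sysconfig.get_python_lib())  # e.g. .../recipe-sysroot/usr/lib/python3.8/site-packages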
From ba7202700578d435b07cfdfb7b57e83185752800 Mon Sep 17 00:00:00 2001
From: Andrei Gherzan <andrei@gherzan.ro>
Date: Mon, 28 Jan 2019 15:57:54 +0000
Subject: [PATCH] _tkinter module needs tk module along with tcl. tk is not yet
integrated in yocto so we skip the check for this module. Avoid a warning by
not adding this module to missing variable.
Upstream-Status: Inappropriate [distribution]
Also simply disable the tk module since it's not in DEPENDS.
Signed-off-by: Andrei Gherzan <andrei@gherzan.ro>
---
setup.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/setup.py b/setup.py
index ab18ff0..7691258 100644
--- a/setup.py
+++ b/setup.py
@@ -1706,8 +1706,8 @@ class PyBuildExt(build_ext):
self.detect_decimal()
self.detect_ctypes()
self.detect_multiprocessing()
- if not self.detect_tkinter():
- self.missing.append('_tkinter')
+# if not self.detect_tkinter():
+# self.missing.append('_tkinter')
self.detect_uuid()
## # Uncomment these lines if you want to play with xxmodule.c
From 62336285cba38017b35cb761c03f0c7e80a671a3 Mon Sep 17 00:00:00 2001
From: Mark Hatle <mark.hatle@windriver.com>
Date: Wed, 21 Sep 2011 20:55:33 -0500
Subject: [PATCH] Lib/cgi.py: Update the script as mentioned in the comment
Upstream-Status: Inappropriate [distribution]
Signed-off-by: Mark Hatle <mark.hatle@windriver.com>
---
Lib/cgi.py | 11 +----------
1 file changed, 1 insertion(+), 10 deletions(-)
diff --git a/Lib/cgi.py b/Lib/cgi.py
index 8cf6687..094c7b4 100755
--- a/Lib/cgi.py
+++ b/Lib/cgi.py
@@ -1,13 +1,4 @@
-#! /usr/local/bin/python
-
-# NOTE: the above "/usr/local/bin/python" is NOT a mistake. It is
-# intentionally NOT "/usr/bin/env python". On many systems
-# (e.g. Solaris), /usr/local/bin is not in $PATH as passed to CGI
-# scripts, and /usr/local/bin is the default directory where Python is
-# installed, so /usr/bin/env would be unable to find python. Granted,
-# binary installations by Linux vendors often install Python in
-# /usr/bin. So let those vendors patch cgi.py to match their choice
-# of installation.
+#! /usr/bin/env python
"""Support module for CGI (Common Gateway Interface) scripts.
#!/usr/bin/env python3
# Scan the do_compile log for modules Python's setup.py reported as not built
# and exit non-zero so missing optional modules fail the build.
import sys

logfile = open(sys.argv[1]).read()

necessary_bits = logfile.find("The necessary bits to build these optional modules were not found")
to_find_bits = logfile.find("To find the necessary bits, look in setup.py in detect_modules() for the module's name.")
if necessary_bits != -1:
    print("%s" % (logfile[necessary_bits:to_find_bits]))

failed_to_build = logfile.find("Failed to build these modules:")
if failed_to_build != -1:
    failed_to_build_end = logfile.find("\n\n", failed_to_build)
    print("%s" % (logfile[failed_to_build:failed_to_build_end]))

if necessary_bits != -1 or failed_to_build != -1:
    sys.exit(1)
# This script is used as a bitbake task to create a new python manifest
# $ bitbake python -c create_manifest
#
# Our goal is to keep python-core as small as possible and add other python
# packages only when the user needs them, hence why we split upstream python
# into several packages.
#
# In a very simplistic way what this does is:
# Launch python and see specifically what is required for it to run at a minimum
#
# Go through the python-manifest file and launch a separate task for every single
# one of the files on each package, this task will check what was required for that
# specific module to run, these modules will be called dependencies.
# The output of such task will be a list of the modules or dependencies that were
# found for that file.
#
# Such output will be parsed by this script: we will look for each dependency in the
# manifest, and if we find that another package already includes it, then we will add
# that package as an RDEPENDS to the package we are currently checking; in case we don't
# find the current dependency in any other package, we will add it to the current package
# as part of FILES.
#
#
# This way we will create a new manifest from the data structure that was built during
# this process, on this new manifest each package will contain specifically only
# what it needs to run.
#
# There are some caveats which we try to deal with, such as repeated files on different
# packages, packages that include folders, wildcards, and special packages.
# It's also important to note that this method only works for python files and shared
# libraries. Static libraries, header files and binaries need to be dealt with manually.
#
# This script differs from its python2 version mostly in how shared libraries are handled.
# The manifest file for python3 has an extra field which contains the cached files for
# each package.
# The method to handle cached files does not work when a module includes a folder which
# itself contains the pycache folder; thankfully, this is almost never the case.
#
# Author: Alejandro Enedino Hernandez Samaniego "aehs29" <aehs29 at gmail dot com>
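As a quick illustration of the FILES/RDEPENDS rule described above, a tiny self-contained sketch with toy manifest data (not the real python3-manifest.json):

# Toy rule: a dependency already owned by another package becomes an RDEPENDS,
# otherwise it is added to the current package's FILES.
old = {'core': {'files': ['${libdir}/python3/os.py']},
       'json': {'files': ['${libdir}/python3/json']}}
new = {'logging': {'files': [], 'rdepends': []}}

for dep in ['${libdir}/python3/os.py', '${libdir}/python3/logging']:
    owner = next((p for p, data in old.items() if dep in data['files']), None)
    if owner and owner != 'logging':
        new['logging']['rdepends'].append(owner)
    else:
        new['logging']['files'].append(dep)

print(new)  # {'logging': {'files': ['${libdir}/python3/logging'], 'rdepends': ['core']}}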
import sys
import subprocess
import json
import os
import collections
# Get python version from ${PYTHON_MAJMIN}
pyversion = str(sys.argv[1])
# Hack to get native python search path (for folders), not fond of it but it works for now
pivot = 'recipe-sysroot-native'
for p in sys.path:
    if pivot in p:
        nativelibfolder = p[:p.find(pivot)+len(pivot)]
# Empty dict to hold the whole manifest
new_manifest = collections.OrderedDict()
# Check for repeated files, folders and wildcards
allfiles = []
repeated = []
wildcards = []
hasfolders = []
allfolders = []
def isFolder(value):
    value = value.replace('${PYTHON_MAJMIN}',pyversion)
    if os.path.isdir(value.replace('${libdir}',nativelibfolder+'/usr/lib')) or os.path.isdir(value.replace('${libdir}',nativelibfolder+'/usr/lib64')) or os.path.isdir(value.replace('${libdir}',nativelibfolder+'/usr/lib32')):
        return True
    else:
        return False

def isCached(item):
    if '__pycache__' in item:
        return True
    else:
        return False
def prepend_comments(comments, json_manifest):
    with open(json_manifest, 'r+') as manifest:
        json_contents = manifest.read()
        manifest.seek(0, 0)
        manifest.write(comments + json_contents)
# Read existing JSON manifest
with open('python3-manifest.json') as manifest:
    # The JSON format doesn't allow comments so we hack the call to keep the comments using a marker
    manifest_str = manifest.read()
    json_start = manifest_str.find('# EOC') + 6 # EOC + \n
    manifest.seek(0)
    comments = manifest.read(json_start)
    manifest_str = manifest.read()

old_manifest = json.loads(manifest_str, object_pairs_hook=collections.OrderedDict)
#
# First pass to get core-package functionality, because we base everything on the fact that core is actually working
# Not exactly the same so it should not be a function
#
print ('Getting dependencies for package: core')
# This special call gets the core dependencies and
# appends to the old manifest so it doesn't hurt what it
# currently holds.
# This way when other packages check for dependencies
# on the new core package, they will still find them
# even when checking the old_manifest
output = subprocess.check_output([sys.executable, 'get_module_deps3.py', 'python-core-package']).decode('utf8')
for coredep in output.split():
    coredep = coredep.replace(pyversion,'${PYTHON_MAJMIN}')
    if isCached(coredep):
        if coredep not in old_manifest['core']['cached']:
            old_manifest['core']['cached'].append(coredep)
    else:
        if coredep not in old_manifest['core']['files']:
            old_manifest['core']['files'].append(coredep)
# The second step is to loop through the existing files contained in the core package
# according to the old manifest, identify if they are modules, or some other type
# of file that we can't import (directories, binaries, configs), in which case we
# can only assume they were added correctly (manually) so we ignore those and
# pass them to the manifest directly.
for filedep in old_manifest['core']['files']:
    if isFolder(filedep):
        if isCached(filedep):
            if filedep not in old_manifest['core']['cached']:
                old_manifest['core']['cached'].append(filedep)
        else:
            if filedep not in old_manifest['core']['files']:
                old_manifest['core']['files'].append(filedep)
        continue
    if '${bindir}' in filedep:
        if filedep not in old_manifest['core']['files']:
            old_manifest['core']['files'].append(filedep)
        continue
    if filedep == '':
        continue
    if '${includedir}' in filedep:
        if filedep not in old_manifest['core']['files']:
            old_manifest['core']['files'].append(filedep)
        continue

    # Get the actual module name; it shouldn't be affected by libdir/bindir, etc.
    pymodule = os.path.splitext(os.path.basename(os.path.normpath(filedep)))[0]

    # We now know that we're dealing with a python module, so we can import it
    # and check what its dependencies are.
    # We launch a separate task for each module for deterministic behavior.
    # Each module will only import what is necessary for it to work.
    # The output of each task will contain each module's dependencies
    print ('Getting dependencies for module: %s' % pymodule)
    output = subprocess.check_output([sys.executable, 'get_module_deps3.py', '%s' % pymodule]).decode('utf8')
    print ('The following dependencies were found for module %s:\n' % pymodule)
    print (output)

    for pymodule_dep in output.split():
        pymodule_dep = pymodule_dep.replace(pyversion,'${PYTHON_MAJMIN}')
        if isCached(pymodule_dep):
            if pymodule_dep not in old_manifest['core']['cached']:
                old_manifest['core']['cached'].append(pymodule_dep)
        else:
            if pymodule_dep not in old_manifest['core']['files']:
                old_manifest['core']['files'].append(pymodule_dep)
# At this point we are done with the core package.
# The old_manifest dictionary is updated only for the core package because
# all others will use this as a base.
# To improve the script speed, we check which packages contain directories
# since we will be looping through (only) those later.
for pypkg in old_manifest:
    for filedep in old_manifest[pypkg]['files']:
        if isFolder(filedep):
            print ('%s is a folder' % filedep)
            if pypkg not in hasfolders:
                hasfolders.append(pypkg)
            if filedep not in allfolders:
                allfolders.append(filedep)
# This is the main loop that will handle each package.
# It works in a similar fashion than the step before, but
# we will now be updating a new dictionary that will eventually
# become the new manifest.
#
# The following loops through all packages in the manifest,
# through all files on each of them, and checks whether or not
# they are modules and can be imported.
# If they can be imported, then it checks for dependencies for
# each of them by launching a separate task.
# The output of that task is then parsed and the manifest is updated
# accordingly: whether it should add the module to FILES for the current package,
# or, if that module already belongs to another package, make the current one
# RDEPEND on it.
for pypkg in old_manifest:
# Use an empty dict as data structure to hold data for each package and fill it up
new_manifest[pypkg] = collections.OrderedDict()
new_manifest[pypkg]['summary'] = old_manifest[pypkg]['summary']
new_manifest[pypkg]['rdepends'] = []
new_manifest[pypkg]['files'] = []
new_manifest[pypkg]['cached'] = old_manifest[pypkg]['cached']
# All packages should depend on core
if pypkg != 'core':
new_manifest[pypkg]['rdepends'].append('core')
new_manifest[pypkg]['cached'] = []
print('\n')
print('--------------------------')
print ('Handling package %s' % pypkg)
print('--------------------------')
# Handle special cases, we assume that when they were manually added
# to the manifest we knew what we were doing.
special_packages = ['misc', 'modules', 'dev', 'tests']
if pypkg in special_packages or 'staticdev' in pypkg:
print('Passing %s package directly' % pypkg)
new_manifest[pypkg] = old_manifest[pypkg]
continue
for filedep in old_manifest[pypkg]['files']:
# We already handled core on the first pass, we can ignore it now
if pypkg == 'core':
if filedep not in new_manifest[pypkg]['files']:
new_manifest[pypkg]['files'].append(filedep)
continue
# Handle/ignore what we cant import
if isFolder(filedep):
new_manifest[pypkg]['files'].append(filedep)
# Asyncio (and others) are both the package and the folder name, we should not skip those...
path,mod = os.path.split(filedep)
if mod != pypkg:
continue
if '${bindir}' in filedep:
if filedep not in new_manifest[pypkg]['files']:
new_manifest[pypkg]['files'].append(filedep)
continue
if filedep == '':
continue
if '${includedir}' in filedep:
if filedep not in new_manifest[pypkg]['files']:
new_manifest[pypkg]['files'].append(filedep)
continue
# Get the actual module name; it shouldn't be affected by libdir/bindir, etc.
# We need to check if the imported module comes from another (e.g. sqlite3.dump)
path,pymodule = os.path.split(filedep)
path = os.path.basename(path)
pymodule = os.path.splitext(os.path.basename(pymodule))[0]
# If this condition is met, it means we need to import it from another module
# or its the folder itself (e.g. unittest)
if path == pypkg:
if pymodule:
pymodule = path + '.' + pymodule
else:
pymodule = path
# We now know that were dealing with a python module, so we can import it
# and check what its dependencies are.
# We launch a separate task for each module for deterministic behavior.
# Each module will only import what is necessary for it to work in specific.
# The output of each task will contain each module's dependencies
print ('\nGetting dependencies for module: %s' % pymodule)
output = subprocess.check_output([sys.executable, 'get_module_deps3.py', '%s' % pymodule]).decode('utf8')
print ('The following dependencies were found for module %s:\n' % pymodule)
print (output)
reportFILES = []
reportRDEPS = []
for pymodule_dep in output.split():
# Warning: This first part is ugly
# One of the dependencies that was found, could be inside of one of the folders included by another package
# We need to check if this happens so we can add the package containing the folder as an rdependency
# e.g. Folder encodings contained in codecs
# This would be solved if no packages included any folders
# This can be done in two ways:
# 1 - We assume that if we take out the filename from the path we would get
# the folder string, then we would check if folder string is in the list of folders
# This would not work if a package contains a folder which contains another folder
# e.g. path/folder1/folder2/filename folder_string= path/folder1/folder2
# folder_string would not match any value contained in the list of folders
#
# 2 - We do it the other way around, checking if the folder is contained in the path
# e.g. path/folder1/folder2/filename folder_string= path/folder1/folder2
# is folder_string inside path/folder1/folder2/filename?,
# Yes, it works, but we waste a couple of milliseconds.
pymodule_dep = pymodule_dep.replace(pyversion,'${PYTHON_MAJMIN}')
inFolders = False
for folder in allfolders:
# The module could have a directory named after it, e.g. xml, if we take out the filename from the path
# we'll end up with ${libdir}, and we want ${libdir}/xml
if isFolder(pymodule_dep):
check_path = pymodule_dep
else:
check_path = os.path.dirname(pymodule_dep)
if folder in check_path :
inFolders = True # Did we find a folder?
folderFound = False # Second flag to break inner for
# Loop only through packages which contain folders
for pypkg_with_folder in hasfolders:
if (folderFound == False):
# print('Checking folder %s on package %s' % (pymodule_dep,pypkg_with_folder))
for folder_dep in old_manifest[pypkg_with_folder]['files'] + old_manifest[pypkg_with_folder]['cached']:
if folder_dep == folder:
print ('%s folder found in %s' % (folder, pypkg_with_folder))
folderFound = True
if pypkg_with_folder not in new_manifest[pypkg]['rdepends'] and pypkg_with_folder != pypkg:
new_manifest[pypkg]['rdepends'].append(pypkg_with_folder)
else:
break
# A folder was found so we're done with this item, we can go on
if inFolders:
continue
# No directories beyond this point
# We might already have this module on the dictionary since it could depend on a (previously checked) module
if pymodule_dep not in new_manifest[pypkg]['files'] and pymodule_dep not in new_manifest[pypkg]['cached']:
# Handle core as a special package, we already did it so we pass it to NEW data structure directly
if pypkg == 'core':
print('Adding %s to %s FILES' % (pymodule_dep, pypkg))
if pymodule_dep.endswith('*'):
wildcards.append(pymodule_dep)
if isCached(pymodule_dep):
new_manifest[pypkg]['cached'].append(pymodule_dep)
else:
new_manifest[pypkg]['files'].append(pymodule_dep)
# Check for repeated files
if pymodule_dep not in allfiles:
allfiles.append(pymodule_dep)
else:
if pymodule_dep not in repeated:
repeated.append(pymodule_dep)
else:
# Last step: Figure out whether this belongs to FILES or RDEPENDS
# We check if this module is already contained in another package; if it is, we add that one
# as an RDEPENDS, and if it's not, it means it should be contained in the current
# package, so we add it to FILES
for possible_rdep in old_manifest:
# Debug
# print('Checking %s ' % pymodule_dep + ' in %s' % possible_rdep)
if pymodule_dep in old_manifest[possible_rdep]['files'] or pymodule_dep in old_manifest[possible_rdep]['cached']:
# Since were nesting, we need to check its not the same pypkg
if(possible_rdep != pypkg):
if possible_rdep not in new_manifest[pypkg]['rdepends']:
# Add it to the new manifest data struct as RDEPENDS since it contains something this module needs
reportRDEPS.append('Adding %s to %s RDEPENDS, because it contains %s\n' % (possible_rdep, pypkg, pymodule_dep))
new_manifest[pypkg]['rdepends'].append(possible_rdep)
break
else:
# Since this module wasn't found in another package, it is not an RDEP,
# so we add it to FILES for this package.
# A module shouldn't contain itself (${libdir}/python3/sqlite3 shouldn't be in sqlite3 files)
if os.path.basename(pymodule_dep) != pypkg:
reportFILES.append(('Adding %s to %s FILES\n' % (pymodule_dep, pypkg)))
if isCached(pymodule_dep):
new_manifest[pypkg]['cached'].append(pymodule_dep)
else:
new_manifest[pypkg]['files'].append(pymodule_dep)
if pymodule_dep.endswith('*'):
wildcards.append(pymodule_dep)
if pymodule_dep not in allfiles:
allfiles.append(pymodule_dep)
else:
if pymodule_dep not in repeated:
repeated.append(pymodule_dep)
print('\n')
print('#################################')
print('Summary for module %s' % pymodule)
print('FILES found for module %s:' % pymodule)
print(''.join(reportFILES))
print('RDEPENDS found for module %s:' % pymodule)
print(''.join(reportRDEPS))
print('#################################')
print('The following FILES contain wildcards, please check if they are necessary')
print(wildcards)
print('The following FILES contain folders, please check if they are necessary')
print(hasfolders)
# Sort it just so it looks nicer
for pypkg in new_manifest:
    new_manifest[pypkg]['files'].sort()
    new_manifest[pypkg]['cached'].sort()
    new_manifest[pypkg]['rdepends'].sort()

# Create the manifest from the data structure that was built
with open('python3-manifest.json.new','w') as outfile:
    json.dump(new_manifest,outfile, indent=4)
    outfile.write('\n')

prepend_comments(comments,'python3-manifest.json.new')

if (repeated):
    error_msg = '\n\nERROR:\n'
    error_msg += 'The following files are repeated (contained in more than one package),\n'
    error_msg += 'this is likely to happen when new files are introduced after an upgrade,\n'
    error_msg += 'please check which package should get it,\n modify the manifest accordingly and re-run the create_manifest task:\n'
    error_msg += '\n'.join(repeated)
    error_msg += '\n'
    sys.exit(error_msg)
configure.ac: add CROSSPYTHONPATH into PYTHONPATH for PYTHON_FOR_BUILD
When building x86->x86 the system will try to execute .so and related items
from the default PYTHONPATH. This will fail if the target CPU contains
instructions that the host CPU does not have, so add CROSSPYTHONPATH
into PYTHONPATH, letting us prepend the list to find the correct libs.
Upstream-Status: Inappropriate [OE-Core integration specific]
Credits-to: Mark Hatle <mark.hatle@windriver.com>
Credits-to: Jackie Huang <jackie.huang@windriver.com>
Signed-off-by: Ricardo Ribalda <ricardo@ribalda.com>
diff --git a/configure.ac b/configure.ac
index 4ab19a6..7036a53 100644
--- a/configure.ac
+++ b/configure.ac
@@ -76,7 +76,7 @@ if test "$cross_compiling" = yes; then
AC_MSG_ERROR([python$PACKAGE_VERSION interpreter not found])
fi
AC_MSG_RESULT($interp)
- PYTHON_FOR_BUILD='_PYTHON_PROJECT_BASE=$(abs_builddir) _PYTHON_HOST_PLATFORM=$(_PYTHON_HOST_PLATFORM) PYTHONPATH=$(shell test -f pybuilddir.txt && echo $(abs_builddir)/`cat pybuilddir.txt`:)$(srcdir)/Lib _PYTHON_SYSCONFIGDATA_NAME=_sysconfigdata_$(ABIFLAGS)_$(MACHDEP)_$(MULTIARCH) '$interp
+ PYTHON_FOR_BUILD='_PYTHON_PROJECT_BASE=$(abs_builddir) _PYTHON_HOST_PLATFORM=$(_PYTHON_HOST_PLATFORM) PYTHONPATH=$(CROSSPYTHONPATH):$(shell test -f pybuilddir.txt && echo $(abs_builddir)/`cat pybuilddir.txt`:)$(srcdir)/Lib _PYTHON_SYSCONFIGDATA_NAME=_sysconfigdata_$(ABIFLAGS)_$(MACHDEP)_$(MULTIARCH) '$interp
fi
elif test "$cross_compiling" = maybe; then
AC_MSG_ERROR([Cross compiling required --host=HOST-TUPLE and --build=ARCH])
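A small illustration of the intent, in plain Python rather than configure machinery and with hypothetical paths: entries prepended to PYTHONPATH are searched first, so the cross lib-dynload directory wins over the freshly built host-side paths.

import os

cross = '/build/recipe-sysroot-native/usr/lib/python3.8/lib-dynload'  # stands in for $(CROSSPYTHONPATH)
builddir = '/build/python3/build/lib.linux-x86_64-3.8'                # stands in for the build tree path

os.environ['PYTHONPATH'] = os.pathsep.join([cross, builddir])
# An interpreter started with this environment places `cross` earlier on sys.path,
# so importable modules are found there before the target-built ones are tried.
print(os.environ['PYTHONPATH'].split(os.pathsep)[0])  # the cross directory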
# This script is launched on a separate task for each python module.
# It checks for dependencies for that specific module and prints
# them out; the output of this execution will have all dependencies
# for that specific module, which will be parsed and dealt with in create_manifest.py
#
# Author: Alejandro Enedino Hernandez Samaniego "aehs29" <aehs29@gmail.com>
# We can get a log per module, for all the dependencies that were found, but it's messy.
debug=False
import sys
# We can get a list of the modules which are currently required to run python,
# so we run python-core and get its modules, then import what we need
# and check what modules are currently running; if we subtract them from the
# modules we had initially, we get the dependencies for the module we imported.
# We use importlib to achieve this, so we also need to know what modules importlib needs
import importlib
core_deps=set(sys.modules)
def fix_path(dep_path):
    import os
    # We DONT want the path on our HOST system
    pivot='recipe-sysroot-native'
    dep_path=dep_path[dep_path.find(pivot)+len(pivot):]

    if '/usr/bin' in dep_path:
        dep_path = dep_path.replace('/usr/bin','${bindir}')

    # Handle multilib, is there a better way?
    if '/usr/lib32' in dep_path:
        dep_path = dep_path.replace('/usr/lib32','${libdir}')
    if '/usr/lib64' in dep_path:
        dep_path = dep_path.replace('/usr/lib64','${libdir}')
    if '/usr/lib' in dep_path:
        dep_path = dep_path.replace('/usr/lib','${libdir}')
    if '/usr/include' in dep_path:
        dep_path = dep_path.replace('/usr/include','${includedir}')
    if '__init__.' in dep_path:
        dep_path = os.path.split(dep_path)[0]
    return dep_path
# Module to import was passed as an argument
current_module = str(sys.argv[1]).rstrip()
if (debug==True):
    log = open('log_%s' % current_module,'w')
    log.write('Module %s generated the following dependencies:\n' % current_module)

try:
    importlib.import_module('%s' % current_module)
except ImportError as e:
    if (debug==True):
        log.write('Module was not found')
    pass
# Get current module dependencies, dif will contain a list of specific deps for this module
module_deps=set(sys.modules)
# We handle the core package (1st pass on create_manifest.py) as a special case
if current_module == 'python-core-package':
    dif = core_deps
else:
    # We know this is not the core package, so there must be a difference.
    dif = module_deps-core_deps
# Check where each dependency came from
for item in dif:
    dep_path=''
    try:
        if (debug==True):
            log.write('Calling: sys.modules[' + '%s' % item + '].__file__\n')
        dep_path = sys.modules['%s' % item].__file__
    except AttributeError as e:
        # Deals with thread (builtin module) not having __file__ attribute
        if debug==True:
            log.write(item + ' ')
            log.write(str(e))
            log.write('\n')
        pass
    except NameError as e:
        # Deals with NameError: name 'dep_path' is not defined
        # because module is not found (wasn't compiled?), e.g. bddsm
        if (debug==True):
            log.write(item+' ')
            log.write(str(e))
        pass

    # Site-customize is a special case since we (OpenEmbedded) put it there manually
    if 'sitecustomize' in dep_path:
        dep_path = '${libdir}/python${PYTHON_MAJMIN}/sitecustomize.py'
        # Prints out result, which is what will be used by create_manifest
        print (dep_path)
        continue

    dep_path = fix_path(dep_path)

    import sysconfig
    soabi=sysconfig.get_config_var('SOABI')
    # Check if it's a shared library and deconstruct it
    if soabi in dep_path:
        if (debug==True):
            log.write('Shared library found in %s' % dep_path)
        dep_path = dep_path.replace(soabi,'*')
        print (dep_path)
        continue

    if (debug==True):
        log.write(dep_path+'\n')
    # Prints out result, which is what will be used by create_manifest
    print (dep_path)

    import imp
    cpython_tag = imp.get_tag()

    cached=''
    # There's no naive way to find *.pyc files on python3
    try:
        if (debug==True):
            log.write('Calling: sys.modules[' + '%s' % item + '].__cached__\n')
        cached = sys.modules['%s' % item].__cached__
    except AttributeError as e:
        # Deals with thread (builtin module) not having __cached__ attribute
        if debug==True:
            log.write(item + ' ')
            log.write(str(e))
            log.write('\n')
        pass
    except NameError as e:
        # Deals with NameError: name 'cached' is not defined
        if (debug==True):
            log.write(item+' ')
            log.write(str(e))
        pass
    if cached is not None:
        if (debug==True):
            log.write(cached)
        cached = fix_path(cached)
        cached = cached.replace(cpython_tag,'*')
        print (cached)

if debug==True:
    log.close()
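To see the technique get_module_deps3.py relies on in isolation, here is a self-contained sketch runnable on any host Python: snapshot sys.modules, import one module, and diff the two sets; the SOABI wildcarding at the end mirrors what the script does for lib-dynload entries (the path shown is illustrative).

import importlib
import sys
import sysconfig

# Snapshot what the interpreter already has loaded (the "core" baseline).
baseline = set(sys.modules)

# Import the module under inspection; everything newly loaded is a dependency.
importlib.import_module('json')
deps = set(sys.modules) - baseline
print(sorted(deps))          # e.g. ['json', 'json.decoder', 'json.encoder', 'json.scanner']

# Shared-library deps are generalised by replacing the SOABI tag with '*',
# just like the script does for lib-dynload entries (path is illustrative).
soabi = sysconfig.get_config_var('SOABI')
path = '/usr/lib/python3.8/lib-dynload/_json.%s.so' % soabi
print(path.replace(soabi, '*'))   # .../_json.*.so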
libainstall installs python-config.py but the .pyc cache files are generated
by the libinstall target. This means some builds may not generate the pyc files
for python-config.py, depending on the order in which things happen, so builds
are not always reproducible.
Add a dependency to avoid the race.
Upstream-Status: Pending
Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
Index: Python-3.8.11/Makefile.pre.in
===================================================================
--- Python-3.8.11.orig/Makefile.pre.in
+++ Python-3.8.11/Makefile.pre.in
@@ -1415,7 +1415,7 @@ LIBSUBDIRS= tkinter tkinter/test tkinter
unittest unittest/test unittest/test/testmock \
venv venv/scripts venv/scripts/common venv/scripts/posix \
curses pydoc_data
-libinstall: build_all $(srcdir)/Modules/xxmodule.c
+libinstall: build_all $(srcdir)/Modules/xxmodule.c libainstall
@for i in $(SCRIPTDIR) $(LIBDEST); \
do \
if test ! -d $(DESTDIR)$$i; then \
From 07df0ae0d70cba6d1847fe1c24a71063930bec60 Mon Sep 17 00:00:00 2001
From: Tyler Hall <tylerwhall@gmail.com>
Date: Sun, 4 May 2014 20:06:43 -0400
Subject: [PATCH] python-config: Revert to using distutils.sysconfig
The newer sysconfig module shares some code with distutils.sysconfig, but the same modifications as in
12-distutils-prefix-is-inside-staging-area.patch make distutils.sysconfig
affect the native runtime as well as cross building. Use the old, patched
implementation, which returns paths in the staging directory and for the target,
as appropriate.
Upstream-Status: Inappropriate [Embedded Specific]
Signed-off-by: Tyler Hall <tylerwhall@gmail.com>
---
Misc/python-config.in | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/Misc/python-config.in b/Misc/python-config.in
index 727c4a8..c702829 100644
--- a/Misc/python-config.in
+++ b/Misc/python-config.in
@@ -6,7 +6,7 @@
import getopt
import os
import sys
-import sysconfig
+from distutils import sysconfig
valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
'ldflags', 'extension-suffix', 'help', 'abiflags', 'configdir',
@@ -35,14 +35,14 @@ if '--help' in opt_flags:
for opt in opt_flags:
if opt == '--prefix':
- print(sysconfig.get_config_var('prefix'))
+ print(sysconfig.PREFIX)
elif opt == '--exec-prefix':
- print(sysconfig.get_config_var('exec_prefix'))
+ print(sysconfig.EXEC_PREFIX)
elif opt in ('--includes', '--cflags'):
- flags = ['-I' + sysconfig.get_path('include'),
- '-I' + sysconfig.get_path('platinclude')]
+ flags = ['-I' + sysconfig.get_python_inc(),
+ '-I' + sysconfig.get_python_inc(plat_specific=True)]
if opt == '--cflags':
flags.extend(getvar('CFLAGS').split())
print(' '.join(flags))
#! /usr/bin/env python3
#
# SPDX-License-Identifier: MIT
#
# Copyright 2019 by Garmin Ltd. or its subsidiaries
#
# A script to reformat python sysconfig
import sys
import pprint
l = {}
g = {}
with open(sys.argv[1], 'r') as f:
    exec(f.read(), g, l)

with open(sys.argv[1], 'w') as f:
    for k in sorted(l.keys()):
        f.write('%s = ' % k)
        pprint.pprint(l[k], stream=f, width=sys.maxsize)
        f.write('\n')
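A toy demonstration of what reformat_sysconfig.py achieves (toy variable names, not a real _sysconfigdata file): after the rewrite, every top-level assignment sits on a single, sorted line, which keeps the generated file stable across builds.

import pprint, sys

with open('toy_sysconfigdata.py', 'w') as f:
    f.write("zvar = {'b': 2, 'a': 1}\navar = [3,\n        2,\n        1]\n")

l, g = {}, {}
with open('toy_sysconfigdata.py') as f:
    exec(f.read(), g, l)

with open('toy_sysconfigdata.py', 'w') as f:
    for k in sorted(l):
        f.write('%s = ' % k)
        pprint.pprint(l[k], stream=f, width=sys.maxsize)
        f.write('\n')

print(open('toy_sysconfigdata.py').read())  # avar and zvar, each on one line, in sorted order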
#!/bin/sh
python3 -m test -v | sed -u -e '/\.\.\. ok/ s/^/PASS: /g' -r -e '/\.\.\. (ERROR|FAIL)/ s/^/FAIL: /g' -e '/\.\.\. skipped/ s/^/SKIP: /g' -e 's/ \.\.\. ok//g' -e 's/ \.\.\. ERROR//g' -e 's/ \.\.\. FAIL//g' -e 's/ \.\.\. skipped//g'
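The sed pipeline above rewrites Python's verbose unittest output into the PASS:/FAIL:/SKIP: lines the ptest runner expects; a simplified Python stand-in (the regexes approximate the sed expressions, and the reason text after "skipped" is dropped here) for one such line:

import re

def to_ptest(line):
    # Map unittest verbose results to ptest-style prefixes.
    line = re.sub(r'(.*) \.\.\. ok$', r'PASS: \1', line)
    line = re.sub(r'(.*) \.\.\. (ERROR|FAIL)$', r'FAIL: \1', line)
    line = re.sub(r'(.*) \.\.\. skipped.*$', r'SKIP: \1', line)
    return line

print(to_ptest('test_abs (test.test_math.MathTests) ... ok'))
# PASS: test_abs (test.test_math.MathTests)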
SUMMARY = "The Python Programming Language"
HOMEPAGE = "http://www.python.org"
DESCRIPTION = "Python is a programming language that lets you work more quickly and integrate your systems more effectively."
LICENSE = "PSF-2.0 & BSD-0-Clause"
SECTION = "devel/python"
LIC_FILES_CHKSUM = "file://LICENSE;md5=c22d2438294c784731bf9dd224a467b7"
SRC_URI = "http://www.python.org/ftp/python/${PV}/Python-${PV}.tar.xz \
file://run-ptest \
file://create_manifest3.py \
file://get_module_deps3.py \
file://python3-manifest.json \
file://check_build_completeness.py \
file://cgi_py.patch \
file://0001-Do-not-add-usr-lib-termcap-to-linker-flags-to-avoid-.patch \
${@bb.utils.contains('PACKAGECONFIG', 'tk', '', 'file://avoid_warning_about_tkinter.patch', d)} \
file://0001-Do-not-use-the-shell-version-of-python-config-that-w.patch \
file://python-config.patch \
file://0001-Makefile.pre-use-qemu-wrapper-when-gathering-profile.patch \
file://0001-Do-not-hardcode-lib-as-location-for-site-packages-an.patch \
file://0001-python3-use-cc_basename-to-replace-CC-for-checking-c.patch \
file://0001-Lib-sysconfig.py-fix-another-place-where-lib-is-hard.patch \
file://0001-Makefile-fix-Issue36464-parallel-build-race-problem.patch \
file://0001-bpo-36852-proper-detection-of-mips-architecture-for-.patch \
file://crosspythonpath.patch \
file://reformat_sysconfig.py \
file://0001-Use-FLAG_REF-always-for-interned-strings.patch \
file://0001-test_locale.py-correct-the-test-output-format.patch \
file://0017-setup.py-do-not-report-missing-dependencies-for-disa.patch \
file://0001-setup.py-pass-missing-libraries-to-Extension-for-mul.patch \
file://0001-Makefile-do-not-compile-.pyc-in-parallel.patch \
file://0001-configure.ac-fix-LIBPL.patch \
file://0001-python3-Do-not-hardcode-lib-for-distutils.patch \
file://0020-configure.ac-setup.py-do-not-add-a-curses-include-pa.patch \
file://makerace.patch \
"
SRC_URI_append_class-native = " \
file://0001-distutils-sysconfig-append-STAGING_LIBDIR-python-sys.patch \
file://12-distutils-prefix-is-inside-staging-area.patch \
file://0001-Don-t-search-system-for-headers-libraries.patch \
"
SRC_URI[md5sum] = "9dd8f82e586b776383c82e27923f8795"
SRC_URI[sha256sum] = "b1d3a76420375343b5e8a22fceb1ac65b77193e9ed27146524f0a9db058728ea"
# exclude pre-releases for both python 2.x and 3.x
UPSTREAM_CHECK_REGEX = "[Pp]ython-(?P<pver>\d+(\.\d+)+).tar"
CVE_PRODUCT = "python"
# Upstream considers this expected behaviour
CVE_CHECK_WHITELIST += "CVE-2007-4559"
# This is not exploitable when glibc has CVE-2016-10739 fixed.
CVE_CHECK_WHITELIST += "CVE-2019-18348"
# This is a Windows-only issue.
CVE_CHECK_WHITELIST += "CVE-2020-15523"
PYTHON_MAJMIN = "3.8"
S = "${WORKDIR}/Python-${PV}"
BBCLASSEXTEND = "native nativesdk"
inherit autotools pkgconfig qemu ptest multilib_header update-alternatives
MULTILIB_SUFFIX = "${@d.getVar('base_libdir',1).split('/')[-1]}"
ALTERNATIVE_${PN}-dev = "python3-config"
ALTERNATIVE_LINK_NAME[python3-config] = "${bindir}/python${PYTHON_MAJMIN}-config"
ALTERNATIVE_TARGET[python3-config] = "${bindir}/python${PYTHON_MAJMIN}-config-${MULTILIB_SUFFIX}"
DEPENDS = "bzip2-replacement-native libffi bzip2 openssl sqlite3 zlib virtual/libintl xz virtual/crypt util-linux libtirpc libnsl2 autoconf-archive"
DEPENDS_append_class-target = " python3-native"
DEPENDS_append_class-nativesdk = " python3-native"
EXTRA_OECONF = " --without-ensurepip --enable-shared"
EXTRA_OECONF_append_class-native = " --bindir=${bindir}/${PN}"
export CROSSPYTHONPATH="${STAGING_LIBDIR_NATIVE}/python${PYTHON_MAJMIN}/lib-dynload/"
EXTRANATIVEPATH += "python3-native"
CACHED_CONFIGUREVARS = " \
ac_cv_file__dev_ptmx=yes \
ac_cv_file__dev_ptc=no \
ac_cv_working_tzset=yes \
"
python() {
    # PGO currently causes builds to not be reproducible, so disable it for
    # now. See YOCTO #13407
    if bb.utils.contains('MACHINE_FEATURES', 'qemu-usermode', True, False, d) and d.getVar('BUILD_REPRODUCIBLE_BINARIES') != '1':
        d.setVar('PACKAGECONFIG_PGO', 'pgo')
    else:
        d.setVar('PACKAGECONFIG_PGO', '')
}
PACKAGECONFIG_class-target ??= "readline ${PACKAGECONFIG_PGO} gdbm"
PACKAGECONFIG_class-native ??= "readline gdbm"
PACKAGECONFIG_class-nativesdk ??= "readline gdbm"
PACKAGECONFIG[readline] = ",,readline"
# Use profile guided optimisation by running PyBench inside qemu-user
PACKAGECONFIG[pgo] = "--enable-optimizations,,qemu-native"
PACKAGECONFIG[tk] = ",,tk"
PACKAGECONFIG[gdbm] = ",,gdbm"
do_configure_prepend () {
mkdir -p ${B}/Modules
cat > ${B}/Modules/Setup.local << EOF
*disabled*
${@bb.utils.contains('PACKAGECONFIG', 'gdbm', '', '_gdbm _dbm', d)}
${@bb.utils.contains('PACKAGECONFIG', 'readline', '', 'readline', d)}
EOF
}
CPPFLAGS_append = " -I${STAGING_INCDIR}/ncursesw -I${STAGING_INCDIR}/uuid"
EXTRA_OEMAKE = '\
STAGING_LIBDIR=${STAGING_LIBDIR} \
STAGING_INCDIR=${STAGING_INCDIR} \
LIB=${baselib} \
'
do_compile_prepend_class-target() {
if ${@bb.utils.contains('PACKAGECONFIG', 'pgo', 'true', 'false', d)}; then
qemu_binary="${@qemu_wrapper_cmdline(d, '${STAGING_DIR_TARGET}', ['${B}', '${STAGING_DIR_TARGET}/${base_libdir}'])}"
cat >pgo-wrapper <<EOF
#!/bin/sh
cd ${B}
$qemu_binary "\$@"
EOF
chmod +x pgo-wrapper
fi
}
do_install_prepend() {
${WORKDIR}/check_build_completeness.py ${T}/log.do_compile
}
do_install_append_class-target() {
oe_multilib_header python${PYTHON_MAJMIN}/pyconfig.h
}
do_install_append_class-native() {
# Make sure we use /usr/bin/env python
for PYTHSCRIPT in `grep -rIl ${bindir}/${PN}/python ${D}${bindir}/${PN}`; do
sed -i -e '1s|^#!.*|#!/usr/bin/env python3|' $PYTHSCRIPT
done
# Add a symlink to the native Python so that scripts can just invoke
# "nativepython" and get the right one without needing absolute paths
# (these often end up too long for the #! parser in the kernel as the
# buffer is 128 bytes long).
ln -s python3-native/python3 ${D}${bindir}/nativepython3
}
do_install_append() {
mkdir -p ${D}${libdir}/python-sysconfigdata
sysconfigfile=`find ${D} -name _sysconfig*.py`
cp $sysconfigfile ${D}${libdir}/python-sysconfigdata/_sysconfigdata.py
sed -i \
-e "s,^ 'LIBDIR'.*, 'LIBDIR': '${STAGING_LIBDIR}'\,,g" \
-e "s,^ 'INCLUDEDIR'.*, 'INCLUDEDIR': '${STAGING_INCDIR}'\,,g" \
-e "s,^ 'CONFINCLUDEDIR'.*, 'CONFINCLUDEDIR': '${STAGING_INCDIR}'\,,g" \
-e "/^ 'INCLDIRSTOMAKE'/{N; s,/usr/include,${STAGING_INCDIR},g}" \
-e "/^ 'INCLUDEPY'/s,/usr/include,${STAGING_INCDIR},g" \
${D}${libdir}/python-sysconfigdata/_sysconfigdata.py
}
do_install_append_class-nativesdk () {
create_wrapper ${D}${bindir}/python${PYTHON_MAJMIN} TERMINFO_DIRS='${sysconfdir}/terminfo:/etc/terminfo:/usr/share/terminfo:/usr/share/misc/terminfo:/lib/terminfo' PYTHONNOUSERSITE='1'
}
SSTATE_SCAN_FILES += "Makefile _sysconfigdata.py"
PACKAGE_PREPROCESS_FUNCS += "py_package_preprocess"
py_package_preprocess () {
# Remove references to buildmachine paths in target Makefile and _sysconfigdata
sed -i -e 's:--sysroot=${STAGING_DIR_TARGET}::g' -e s:'--with-libtool-sysroot=${STAGING_DIR_TARGET}'::g \
-e 's|${DEBUG_PREFIX_MAP}||g' \
-e 's:${HOSTTOOLS_DIR}/::g' \
-e 's:${RECIPE_SYSROOT_NATIVE}::g' \
-e 's:${RECIPE_SYSROOT}::g' \
-e 's:${BASE_WORKDIR}/${MULTIMACH_TARGET_SYS}::g' \
${PKGD}/${libdir}/python${PYTHON_MAJMIN}/config-${PYTHON_MAJMIN}${PYTHON_ABI}*/Makefile \
${PKGD}/${libdir}/python${PYTHON_MAJMIN}/_sysconfigdata*.py \
${PKGD}/${bindir}/python${PYTHON_MAJMIN}-config
# Reformat _sysconfigdata after modifying it so that it remains
# reproducible
for c in ${PKGD}/${libdir}/python${PYTHON_MAJMIN}/_sysconfigdata*.py; do
python3 ${WORKDIR}/reformat_sysconfig.py $c
done
# Recompile _sysconfigdata after modifying it
cd ${PKGD}
sysconfigfile=`find . -name _sysconfigdata_*.py`
${STAGING_BINDIR_NATIVE}/python3-native/python3 \
-c "from py_compile import compile; compile('$sysconfigfile')"
${STAGING_BINDIR_NATIVE}/python3-native/python3 \
-c "from py_compile import compile; compile('$sysconfigfile', optimize=1)"
${STAGING_BINDIR_NATIVE}/python3-native/python3 \
-c "from py_compile import compile; compile('$sysconfigfile', optimize=2)"
cd -
mv ${PKGD}/${bindir}/python${PYTHON_MAJMIN}-config ${PKGD}/${bindir}/python${PYTHON_MAJMIN}-config-${MULTILIB_SUFFIX}
#Remove the unneeded copy of target sysconfig data
rm -rf ${PKGD}/${libdir}/python-sysconfigdata
}
# We want bytecode precompiled .py files (.pyc's) by default
# but the user may override this in their own conf
INCLUDE_PYCS ?= "1"
python(){
    import collections, json

    filename = os.path.join(d.getVar('THISDIR'), 'python3', 'python3-manifest.json')
    # This python changes the datastore based on the contents of a file, so mark
    # that dependency.
    bb.parse.mark_dependency(d, filename)

    with open(filename) as manifest_file:
        manifest_str = manifest_file.read()
        json_start = manifest_str.find('# EOC') + 6
        manifest_file.seek(json_start)
        manifest_str = manifest_file.read()
        python_manifest = json.loads(manifest_str, object_pairs_hook=collections.OrderedDict)

    # First set RPROVIDES for -native case
    # Hardcoded since it can't be python3-native-foo, should be python3-foo-native
    pn = 'python3'
    rprovides = d.getVar('RPROVIDES').split()

    # ${PN}-misc-native is not in the manifest
    rprovides.append(pn + '-misc-native')

    for key in python_manifest:
        pypackage = pn + '-' + key + '-native'
        if pypackage not in rprovides:
            rprovides.append(pypackage)

    d.setVar('RPROVIDES_class-native', ' '.join(rprovides))

    # Then work on the target
    include_pycs = d.getVar('INCLUDE_PYCS')
    packages = d.getVar('PACKAGES').split()
    pn = d.getVar('PN')

    newpackages=[]
    for key in python_manifest:
        pypackage = pn + '-' + key

        if pypackage not in packages:
            # We need to prepend, otherwise python-misc gets everything
            # so we use a new variable
            newpackages.append(pypackage)

        # "Build" python's manifest FILES, RDEPENDS and SUMMARY
        d.setVar('FILES_' + pypackage, '')
        for value in python_manifest[key]['files']:
            d.appendVar('FILES_' + pypackage, ' ' + value)

        # Add cached files
        if include_pycs == '1':
            for value in python_manifest[key]['cached']:
                d.appendVar('FILES_' + pypackage, ' ' + value)

        for value in python_manifest[key]['rdepends']:
            # Make it work with or without $PN
            if '${PN}' in value:
                value=value.split('-', 1)[1]
            d.appendVar('RDEPENDS_' + pypackage, ' ' + pn + '-' + value)

        for value in python_manifest[key].get('rrecommends', ()):
            if '${PN}' in value:
                value=value.split('-', 1)[1]
            d.appendVar('RRECOMMENDS_' + pypackage, ' ' + pn + '-' + value)

        d.setVar('SUMMARY_' + pypackage, python_manifest[key]['summary'])

    # Prepending so as to avoid python-misc getting everything
    packages = newpackages + packages
    d.setVar('PACKAGES', ' '.join(packages))
    d.setVar('ALLOW_EMPTY_${PN}-modules', '1')
    d.setVar('ALLOW_EMPTY_${PN}-pkgutil', '1')
}
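For reference, a rough stand-alone sketch (outside bitbake, using a made-up manifest entry) of the FILES/RDEPENDS values the anonymous python above derives for one package:

manifest_entry = {
    "summary": "Python JSON support",
    "rdepends": ["core"],
    "files": ["${libdir}/python3.8/json"],
    "cached": ["${libdir}/python3.8/json/__pycache__"],
}

pn, key, include_pycs = "python3", "json", "1"
files = list(manifest_entry["files"]) + (manifest_entry["cached"] if include_pycs == "1" else [])
rdepends = [pn + "-" + dep for dep in manifest_entry["rdepends"]]

print("FILES_%s-%s = %r" % (pn, key, " ".join(files)))
print("RDEPENDS_%s-%s = %r" % (pn, key, " ".join(rdepends)))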
# Files needed to create a new manifest
do_create_manifest() {
# This task should be run with every new release of Python.
# We must ensure that PACKAGECONFIG enables everything when creating
# a new manifest; this is to base our new manifest on a complete
# native python build, containing all dependencies, otherwise the task
# won't be able to find the required files.
# e.g. BerkeleyDB is an optional build dependency, so it may or may not
# be present; we must ensure it is.
cd ${WORKDIR}
# This needs to be executed by python-native and NOT by HOST's python
nativepython3 create_manifest3.py ${PYTHON_MAJMIN}
cp python3-manifest.json.new ${THISDIR}/python3/python3-manifest.json
}
# bitbake python -c create_manifest
# Make sure we have native python ready when we create a new manifest
addtask do_create_manifest after do_patch do_prepare_recipe_sysroot
# manual dependency additions
RRECOMMENDS_${PN}-core_append_class-nativesdk = " nativesdk-python3-modules"
RRECOMMENDS_${PN}-crypt_append_class-target = " openssl ca-certificates"
RRECOMMENDS_${PN}-crypt_append_class-nativesdk = " openssl ca-certificates"
# For historical reasons PN is empty and provided by python3-modules
FILES_${PN} = ""
RPROVIDES_${PN}-modules = "${PN}"
FILES_${PN}-pydoc += "${bindir}/pydoc${PYTHON_MAJMIN} ${bindir}/pydoc3"
FILES_${PN}-idle += "${bindir}/idle3 ${bindir}/idle${PYTHON_MAJMIN}"
# provide python-pyvenv from python3-venv
RPROVIDES_${PN}-venv += "python3-pyvenv"
# package libpython3
PACKAGES =+ "libpython3 libpython3-staticdev"
FILES_libpython3 = "${libdir}/libpython*.so.*"
FILES_libpython3-staticdev += "${libdir}/python${PYTHON_MAJMIN}/config-${PYTHON_MAJMIN}-*/libpython${PYTHON_MAJMIN}.a"
INSANE_SKIP_${PN}-dev += "dev-elf"
INSANE_SKIP_${PN}-ptest += "dev-deps"
# catch all the rest (unsorted)
PACKAGES += "${PN}-misc"
RDEPENDS_${PN}-misc += "python3-core python3-email python3-codecs python3-pydoc python3-pickle python3-audio"
RDEPENDS_${PN}-modules_append_class-target = " python3-misc"
RDEPENDS_${PN}-modules_append_class-nativesdk = " python3-misc"
FILES_${PN}-misc = "${libdir}/python${PYTHON_MAJMIN} ${libdir}/python${PYTHON_MAJMIN}/lib-dynload"
# catch manpage
PACKAGES += "${PN}-man"
FILES_${PN}-man = "${datadir}/man"
# See https://bugs.python.org/issue18748 and https://bugs.python.org/issue37395
RDEPENDS_libpython3_append_libc-glibc = " libgcc"
RDEPENDS_${PN}-ctypes_append_libc-glibc = " ${MLPREFIX}ldconfig"
RDEPENDS_${PN}-ptest = "${PN}-modules ${PN}-tests ${PN}-dev unzip bzip2 libgcc tzdata-europe coreutils sed"
RDEPENDS_${PN}-ptest_append_libc-glibc = " locale-base-tr-tr.iso-8859-9"
RDEPENDS_${PN}-tkinter += "${@bb.utils.contains('PACKAGECONFIG', 'tk', 'tk tk-lib', '', d)}"
RDEPENDS_${PN}-dev = ""
RDEPENDS_${PN}-tests_append_class-target = " bash"
RDEPENDS_${PN}-tests_append_class-nativesdk = " bash"
# Python's tests contain large numbers of files we don't need in the recipe sysroots
SYSROOT_PREPROCESS_FUNCS += " py3_sysroot_cleanup"
py3_sysroot_cleanup () {
rm -rf ${SYSROOT_DESTDIR}${libdir}/python${PYTHON_MAJMIN}/test
}
# Upstream-status: Submitted/Backport
# Upstream-URL: https://lists.openembedded.org/g/openembedded-core/message/158561
# HG changeset patch
# User Marco Bodrato <bodrato@mail.dm.unipi.it>
# Date 1634836009 -7200
# Node ID 561a9c25298e17bb01896801ff353546c6923dbd
# Parent e1fd9db13b475209a864577237ea4b9105b3e96e
mpz/inp_raw.c: Avoid bit size overflows
diff -r e1fd9db13b47 -r 561a9c25298e mpz/inp_raw.c
--- a/mpz/inp_raw.c Tue Dec 22 23:49:51 2020 +0100
+++ b/mpz/inp_raw.c Thu Oct 21 19:06:49 2021 +0200
@@ -88,8 +88,11 @@
abs_csize = ABS (csize);
+ if (UNLIKELY (abs_csize > ~(mp_bitcnt_t) 0 / 8))
+ return 0; /* Bit size overflows */
+
/* round up to a multiple of limbs */
- abs_xsize = BITS_TO_LIMBS (abs_csize*8);
+ abs_xsize = BITS_TO_LIMBS ((mp_bitcnt_t) abs_csize * 8);
if (abs_xsize != 0)
{
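A back-of-the-envelope check of the overflow this patch guards against, sketched in Python under the assumption of a 32-bit mp_bitcnt_t (on such ABIs abs_csize * 8 can wrap):

MP_BITCNT_MAX = 2**32 - 1        # assumed 32-bit mp_bitcnt_t for this example
abs_csize = 2**29 + 1            # large (attacker-controlled) raw size in bytes
wrapped_bits = (abs_csize * 8) & MP_BITCNT_MAX
print(wrapped_bits)                       # 8 -- far smaller than the real bit count
print(abs_csize > MP_BITCNT_MAX // 8)     # True -> the patched code now returns 0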
# SPDX-FileCopyrightText: Huawei Inc.
#
# SPDX-License-Identifier: Apache-2.0
FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"
SRC_URI += "file://CVE-2021-43618.patch"